diff --git a/.dockerignore b/.dockerignore index 87e525d..8724601 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,60 +1,60 @@ -# Node.js -node_modules/ -npm-debug.log* -yarn-debug.log* -yarn-error.log* -.pnpm-debug.log* - -# Build outputs -dist/ -build/ -.turbo/ -.next/ - -# Environment files -.env -.env.local -.env.development.local -.env.test.local -.env.production.local - -# IDE -.vscode/ -.idea/ -*.swp -*.swo - -# OS -.DS_Store -Thumbs.db - -# Logs -logs/ -*.log - -# Git -.git/ -.gitignore - -# Documentation -README.md -DOCKER.md -docs/ - -# Docker -Dockerfile* -docker-compose* -.dockerignore - -# Cache -.cache/ -.temp/ -.tmp/ - -# Test coverage -coverage/ -.nyc_output/ - -# Misc -*.tgz -*.tar.gz +# Node.js +node_modules/ +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# Build outputs +dist/ +build/ +.turbo/ +.next/ + +# Environment files +.env +.env.local +.env.development.local +.env.test.local +.env.production.local + +# IDE +.vscode/ +.idea/ +*.swp +*.swo + +# OS +.DS_Store +Thumbs.db + +# Logs +logs/ +*.log + +# Git +.git/ +.gitignore + +# Documentation +README.md +DOCKER.md +docs/ + +# Docker +Dockerfile* +docker-compose* +.dockerignore + +# Cache +.cache/ +.temp/ +.tmp/ + +# Test coverage +coverage/ +.nyc_output/ + +# Misc +*.tgz +*.tar.gz diff --git a/.gitignore b/.gitignore index 911611e..6079a5f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,112 +1,112 @@ -# Dependencies -node_modules/ -.pnp -.pnp.js - -# Production builds -dist/ -build/ -.next/ -*.js.map -*.d.ts - - -# Environment variables -.env -.env.local -.env.development.local -.env.test.local -.env.production.local - -# Logs -npm-debug.log* -yarn-debug.log* -yarn-error.log* -bun-debug.log* -bun-error.log* -*.log - -# Runtime data -pids -*.pid -*.seed -*.pid.lock -*.tsbuildinfo - -# Coverage directory used by tools like istanbul -coverage/ -*.lcov - -# Dependency directories -.pnpm-store/ - -# Optional npm cache directory -.npm - -# Optional eslint cache -.eslintcache 
- -# Optional stylelint cache -.stylelintcache - -# Microbundle cache -.rpt2_cache/ -.rts2_cache_cjs/ -.rts2_cache_es/ -.rts2_cache_umd/ - -# Optional REPL history -.node_repl_history - -# Output of 'npm pack' -*.tgz - -# Yarn Integrity file -.yarn-integrity - -# parcel-bundler cache (https://parceljs.org/) -.cache -.parcel-cache - -# Next.js build output -.next - -# Nuxt.js build / generate output -.nuxt - -# Storybook build outputs -.out -.storybook-out - -# Temporary folders -tmp/ -temp/ - -# Editor directories and files -.idea/ -*.swp -*.swo -*~ - -# OS generated files -.DS_Store -.DS_Store? -._* -.Spotlight-V100 -.Trashes -ehthumbs.db -Thumbs.db - -# Trading bot specific -.data/ -.backtest-results/ -.logs/ -.old/ -.mongo/ -.chat/ -*.db -*.sqlite -*.sqlite3 - -# Turbo -.turbo +# Dependencies +node_modules/ +.pnp +.pnp.js + +# Production builds +dist/ +build/ +.next/ +*.js.map +*.d.ts + + +# Environment variables +.env +.env.local +.env.development.local +.env.test.local +.env.production.local + +# Logs +npm-debug.log* +yarn-debug.log* +yarn-error.log* +bun-debug.log* +bun-error.log* +*.log + +# Runtime data +pids +*.pid +*.seed +*.pid.lock +*.tsbuildinfo + +# Coverage directory used by tools like istanbul +coverage/ +*.lcov + +# Dependency directories +.pnpm-store/ + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history +.node_repl_history + +# Output of 'npm pack' +*.tgz + +# Yarn Integrity file +.yarn-integrity + +# parcel-bundler cache (https://parceljs.org/) +.cache +.parcel-cache + +# Next.js build output +.next + +# Nuxt.js build / generate output +.nuxt + +# Storybook build outputs +.out +.storybook-out + +# Temporary folders +tmp/ +temp/ + +# Editor directories and files +.idea/ +*.swp +*.swo +*~ + +# OS generated files +.DS_Store +.DS_Store? 
+._* +.Spotlight-V100 +.Trashes +ehthumbs.db +Thumbs.db + +# Trading bot specific +.data/ +.backtest-results/ +.logs/ +.old/ +.mongo/ +.chat/ +*.db +*.sqlite +*.sqlite3 + +# Turbo +.turbo diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ce9f46a..a70399f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,19 +1,19 @@ -stages: -- build -- test -- deploy -- review -- dast -- staging -- canary -- production -- incremental rollout 10% -- incremental rollout 25% -- incremental rollout 50% -- incremental rollout 100% -- performance -- cleanup -sast: - stage: test -include: -- template: Auto-DevOps.gitlab-ci.yml +stages: +- build +- test +- deploy +- review +- dast +- staging +- canary +- production +- incremental rollout 10% +- incremental rollout 25% +- incremental rollout 50% +- incremental rollout 100% +- performance +- cleanup +sast: + stage: test +include: +- template: Auto-DevOps.gitlab-ci.yml diff --git a/.vscode/settings.json b/.vscode/settings.json index 2d845b6..311fea7 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,24 +1,24 @@ -{ - "yaml.schemas": { - "https://raw.githubusercontent.com/compose-spec/compose-spec/master/schema/compose-spec.json": [ - "docker-compose*.yml", - "docker-compose*.yaml" - ], - "https://json.schemastore.org/grafana-dashboard-5.x.json": [ - "monitoring/grafana/provisioning/datasources/*.yml" - ], - "https://json.schemastore.org/kustomization.json": [ - "k8s/**/kustomization.yml" - ] - }, - "yaml.customTags": [ - "!datasources", - "!dashboard", - "!notification", - "!template" - ], - "yaml.validate": true, - "yaml.completion": true, - "yaml.hover": true, - "yaml.format.enable": true +{ + "yaml.schemas": { + "https://raw.githubusercontent.com/compose-spec/compose-spec/master/schema/compose-spec.json": [ + "docker-compose*.yml", + "docker-compose*.yaml" + ], + "https://json.schemastore.org/grafana-dashboard-5.x.json": [ + "monitoring/grafana/provisioning/datasources/*.yml" + ], + 
"https://json.schemastore.org/kustomization.json": [ + "k8s/**/kustomization.yml" + ] + }, + "yaml.customTags": [ + "!datasources", + "!dashboard", + "!notification", + "!template" + ], + "yaml.validate": true, + "yaml.completion": true, + "yaml.hover": true, + "yaml.format.enable": true } \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json index e0861e3..ae4bd98 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -1,19 +1,19 @@ -{ - "version": "2.0.0", - "tasks": [ - { - "label": "Start Data Service", - "type": "shell", - "command": "bun", - "args": [ - "run", - "dev" - ], - "group": "build", - "isBackground": true, - "problemMatcher": [ - "$tsc" - ] - } - ] -} +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Start Data Service", + "type": "shell", + "command": "bun", + "args": [ + "run", + "dev" + ], + "group": "build", + "isBackground": true, + "problemMatcher": [ + "$tsc" + ] + } + ] +} diff --git a/DEVELOPMENT-ROADMAP.md b/DEVELOPMENT-ROADMAP.md index 56cc19b..2810b43 100644 --- a/DEVELOPMENT-ROADMAP.md +++ b/DEVELOPMENT-ROADMAP.md @@ -1,377 +1,377 @@ -# πŸ“‹ Stock Bot Development Roadmap - -*Last Updated: June 2025* - -## 🎯 Overview - -This document outlines the development plan for the Stock Bot platform, focusing on building a robust data pipeline from market data providers through processing layers to trading execution. The plan emphasizes establishing solid foundational layers before adding advanced features. 
- -## πŸ—οΈ Architecture Philosophy - -``` -Raw Data β†’ Clean Data β†’ Insights β†’ Strategies β†’ Execution β†’ Monitoring -``` - -Our approach prioritizes: -- **Data Quality First**: Clean, validated data is the foundation -- **Incremental Complexity**: Start simple, add sophistication gradually -- **Monitoring Everything**: Observability at each layer -- **Fault Tolerance**: Graceful handling of failures and data gaps - ---- - -## πŸ“Š Phase 1: Data Foundation Layer (Current Focus) - -### 1.1 Data Service & Providers βœ… **In Progress** - -**Current Status**: Basic structure in place, needs enhancement - -**Core Components**: -- `apps/data-service` - Central data orchestration service -- Provider implementations: - - `providers/yahoo.provider.ts` βœ… Basic implementation - - `providers/quotemedia.provider.ts` βœ… Basic implementation - - `providers/proxy.provider.ts` βœ… Proxy/fallback logic - -**Immediate Tasks**: - -1. **Enhance Provider Reliability** - ```typescript - // libs/data-providers (NEW LIBRARY NEEDED) - interface DataProvider { - getName(): string; - getQuote(symbol: string): Promise; - getHistorical(symbol: string, period: TimePeriod): Promise; - isHealthy(): Promise; - getRateLimit(): RateLimitInfo; - } - ``` - -2. **Add Rate Limiting & Circuit Breakers** - - Implement in `libs/http` client - - Add provider-specific rate limits - - Circuit breaker pattern for failed providers - -3. **Data Validation Layer** - ```typescript - // libs/data-validation (NEW LIBRARY NEEDED) - - Price reasonableness checks - - Volume validation - - Timestamp validation - - Missing data detection - ``` - -4. 
**Provider Registry Enhancement** - - Dynamic provider switching - - Health-based routing - - Cost optimization (free β†’ paid fallback) - -### 1.2 Raw Data Storage - -**Storage Strategy**: -- **QuestDB**: Real-time market data (OHLCV, quotes) -- **MongoDB**: Provider responses, metadata, configurations -- **PostgreSQL**: Processed/clean data, trading records - -**Schema Design**: -```sql --- QuestDB Time-Series Tables -raw_quotes (timestamp, symbol, provider, bid, ask, last, volume) -raw_ohlcv (timestamp, symbol, provider, open, high, low, close, volume) -provider_health (timestamp, provider, latency, success_rate, error_rate) - --- MongoDB Collections -provider_responses: { provider, symbol, timestamp, raw_response, status } -data_quality_metrics: { symbol, date, completeness, accuracy, issues[] } -``` - -**Immediate Implementation**: -1. Enhance `libs/questdb-client` with streaming inserts -2. Add data retention policies -3. Implement data compression strategies - ---- - -## 🧹 Phase 2: Data Processing & Quality Layer - -### 2.1 Data Cleaning Service ⚑ **Next Priority** - -**New Service**: `apps/processing-service` - -**Core Responsibilities**: -1. **Data Normalization** - - Standardize timestamps (UTC) - - Normalize price formats - - Handle split/dividend adjustments - -2. **Quality Checks** - - Outlier detection (price spikes, volume anomalies) - - Gap filling strategies - - Cross-provider validation - -3. 
**Data Enrichment** - - Calculate derived metrics (returns, volatility) - - Add technical indicators - - Market session classification - -**Library Enhancements Needed**: - -```typescript -// libs/data-frame (ENHANCE EXISTING) -class MarketDataFrame { - // Add time-series specific operations - fillGaps(strategy: GapFillStrategy): MarketDataFrame; - detectOutliers(method: OutlierMethod): OutlierReport; - normalize(): MarketDataFrame; - calculateReturns(period: number): MarketDataFrame; -} - -// libs/data-quality (NEW LIBRARY) -interface QualityMetrics { - completeness: number; - accuracy: number; - timeliness: number; - consistency: number; - issues: QualityIssue[]; -} -``` - -### 2.2 Technical Indicators Library - -**Enhance**: `libs/strategy-engine` or create `libs/technical-indicators` - -**Initial Indicators**: -- Moving averages (SMA, EMA, VWAP) -- Momentum (RSI, MACD, Stochastic) -- Volatility (Bollinger Bands, ATR) -- Volume (OBV, Volume Profile) - -```typescript -// Implementation approach -interface TechnicalIndicator { - name: string; - calculate(data: OHLCV[]): T[]; - getSignal(current: T, previous: T[]): Signal; -} -``` - ---- - -## 🧠 Phase 3: Analytics & Strategy Layer - -### 3.1 Strategy Engine Enhancement - -**Current**: Basic structure exists in `libs/strategy-engine` - -**Enhancements Needed**: - -1. **Strategy Framework** - ```typescript - abstract class TradingStrategy { - abstract analyze(data: MarketData): StrategySignal[]; - abstract getRiskParams(): RiskParameters; - backtest(historicalData: MarketData[]): BacktestResults; - } - ``` - -2. **Signal Generation** - - Entry/exit signals - - Position sizing recommendations - - Risk-adjusted scores - -3. 
**Strategy Types to Implement**: - - Mean reversion - - Momentum/trend following - - Statistical arbitrage - - Volume-based strategies - -### 3.2 Backtesting Engine - -**New Service**: Enhanced `apps/strategy-service` - -**Features**: -- Historical simulation -- Performance metrics calculation -- Risk analysis -- Strategy comparison - ---- - -## ⚑ Phase 4: Execution Layer - -### 4.1 Portfolio Management - -**Enhance**: `apps/portfolio-service` - -**Core Features**: -- Position tracking -- Risk monitoring -- P&L calculation -- Margin management - -### 4.2 Order Management - -**New Service**: `apps/order-service` - -**Responsibilities**: -- Order validation -- Execution routing -- Fill reporting -- Trade reconciliation - -### 4.3 Risk Management - -**New Library**: `libs/risk-engine` - -**Risk Controls**: -- Position limits -- Drawdown limits -- Correlation limits -- Volatility scaling - ---- - -## πŸ“š Library Improvements Roadmap - -### Immediate (Phase 1-2) - -1. **`libs/http`** βœ… **Current Priority** - - [ ] Rate limiting middleware - - [ ] Circuit breaker pattern - - [ ] Request/response caching - - [ ] Retry strategies with exponential backoff - -2. **`libs/questdb-client`** - - [ ] Streaming insert optimization - - [ ] Batch insert operations - - [ ] Connection pooling - - [ ] Query result caching - -3. **`libs/logger`** βœ… **Recently Updated** - - [x] Migrated to `getLogger()` pattern - - [ ] Performance metrics logging - - [ ] Structured trading event logging - -4. **`libs/data-frame`** - - [ ] Time-series operations - - [ ] Financial calculations - - [ ] Memory optimization for large datasets - -### Medium Term (Phase 3) - -5. **`libs/cache`** - - [ ] Market data caching strategies - - [ ] Cache warming for frequently accessed symbols - - [ ] Distributed caching support - -6. **`libs/config`** - - [ ] Strategy-specific configurations - - [ ] Dynamic configuration updates - - [ ] Environment-specific overrides - -### Long Term (Phase 4+) - -7. 
**`libs/vector-engine`** - - [ ] Market similarity analysis - - [ ] Pattern recognition - - [ ] Correlation analysis - ---- - -## 🎯 Immediate Next Steps (Next 2 Weeks) - -### Week 1: Data Provider Hardening -1. **Enhance HTTP Client** (`libs/http`) - - Implement rate limiting - - Add circuit breaker pattern - - Add comprehensive error handling - -2. **Provider Reliability** (`apps/data-service`) - - Add health checks for all providers - - Implement fallback logic - - Add provider performance monitoring - -3. **Data Validation** - - Create `libs/data-validation` - - Implement basic price/volume validation - - Add data quality metrics - -### Week 2: Processing Foundation -1. **Start Processing Service** (`apps/processing-service`) - - Basic data cleaning pipeline - - Outlier detection - - Gap filling strategies - -2. **QuestDB Optimization** (`libs/questdb-client`) - - Implement streaming inserts - - Add batch operations - - Optimize for time-series data - -3. **Technical Indicators** - - Start `libs/technical-indicators` - - Implement basic indicators (SMA, EMA, RSI) - ---- - -## πŸ“Š Success Metrics - -### Phase 1 Completion Criteria -- [ ] 99.9% data provider uptime -- [ ] <500ms average data latency -- [ ] Zero data quality issues for major symbols -- [ ] All providers monitored and health-checked - -### Phase 2 Completion Criteria -- [ ] Automated data quality scoring -- [ ] Gap-free historical data for 100+ symbols -- [ ] Real-time technical indicator calculation -- [ ] Processing latency <100ms - -### Phase 3 Completion Criteria -- [ ] 5+ implemented trading strategies -- [ ] Comprehensive backtesting framework -- [ ] Performance analytics dashboard - ---- - -## 🚨 Risk Mitigation - -### Data Risks -- **Provider Failures**: Multi-provider fallback strategy -- **Data Quality**: Automated validation and alerting -- **Rate Limits**: Smart request distribution - -### Technical Risks -- **Scalability**: Horizontal scaling design -- **Latency**: Optimize critical 
paths early -- **Data Loss**: Comprehensive backup strategies - -### Operational Risks -- **Monitoring**: Full observability stack (Grafana, Loki, Prometheus) -- **Alerting**: Critical issue notifications -- **Documentation**: Keep architecture docs current - ---- - -## πŸ’‘ Innovation Opportunities - -### Machine Learning Integration -- Predictive models for data quality -- Anomaly detection in market data -- Strategy parameter optimization - -### Real-time Processing -- Stream processing with Kafka/Pulsar -- Event-driven architecture -- WebSocket data feeds - -### Advanced Analytics -- Market microstructure analysis -- Alternative data integration -- Cross-asset correlation analysis - ---- - -*This roadmap is a living document that will evolve as we learn and adapt. Focus remains on building solid foundations before adding complexity.* - -**Next Review**: End of June 2025 +# πŸ“‹ Stock Bot Development Roadmap + +*Last Updated: June 2025* + +## 🎯 Overview + +This document outlines the development plan for the Stock Bot platform, focusing on building a robust data pipeline from market data providers through processing layers to trading execution. The plan emphasizes establishing solid foundational layers before adding advanced features. 
+ +## πŸ—οΈ Architecture Philosophy + +``` +Raw Data β†’ Clean Data β†’ Insights β†’ Strategies β†’ Execution β†’ Monitoring +``` + +Our approach prioritizes: +- **Data Quality First**: Clean, validated data is the foundation +- **Incremental Complexity**: Start simple, add sophistication gradually +- **Monitoring Everything**: Observability at each layer +- **Fault Tolerance**: Graceful handling of failures and data gaps + +--- + +## πŸ“Š Phase 1: Data Foundation Layer (Current Focus) + +### 1.1 Data Service & Providers βœ… **In Progress** + +**Current Status**: Basic structure in place, needs enhancement + +**Core Components**: +- `apps/data-service` - Central data orchestration service +- Provider implementations: + - `providers/yahoo.provider.ts` βœ… Basic implementation + - `providers/quotemedia.provider.ts` βœ… Basic implementation + - `providers/proxy.provider.ts` βœ… Proxy/fallback logic + +**Immediate Tasks**: + +1. **Enhance Provider Reliability** + ```typescript + // libs/data-providers (NEW LIBRARY NEEDED) + interface DataProvider { + getName(): string; + getQuote(symbol: string): Promise; + getHistorical(symbol: string, period: TimePeriod): Promise; + isHealthy(): Promise; + getRateLimit(): RateLimitInfo; + } + ``` + +2. **Add Rate Limiting & Circuit Breakers** + - Implement in `libs/http` client + - Add provider-specific rate limits + - Circuit breaker pattern for failed providers + +3. **Data Validation Layer** + ```typescript + // libs/data-validation (NEW LIBRARY NEEDED) + - Price reasonableness checks + - Volume validation + - Timestamp validation + - Missing data detection + ``` + +4. 
**Provider Registry Enhancement** + - Dynamic provider switching + - Health-based routing + - Cost optimization (free β†’ paid fallback) + +### 1.2 Raw Data Storage + +**Storage Strategy**: +- **QuestDB**: Real-time market data (OHLCV, quotes) +- **MongoDB**: Provider responses, metadata, configurations +- **PostgreSQL**: Processed/clean data, trading records + +**Schema Design**: +```sql +-- QuestDB Time-Series Tables +raw_quotes (timestamp, symbol, provider, bid, ask, last, volume) +raw_ohlcv (timestamp, symbol, provider, open, high, low, close, volume) +provider_health (timestamp, provider, latency, success_rate, error_rate) + +-- MongoDB Collections +provider_responses: { provider, symbol, timestamp, raw_response, status } +data_quality_metrics: { symbol, date, completeness, accuracy, issues[] } +``` + +**Immediate Implementation**: +1. Enhance `libs/questdb-client` with streaming inserts +2. Add data retention policies +3. Implement data compression strategies + +--- + +## 🧹 Phase 2: Data Processing & Quality Layer + +### 2.1 Data Cleaning Service ⚑ **Next Priority** + +**New Service**: `apps/processing-service` + +**Core Responsibilities**: +1. **Data Normalization** + - Standardize timestamps (UTC) + - Normalize price formats + - Handle split/dividend adjustments + +2. **Quality Checks** + - Outlier detection (price spikes, volume anomalies) + - Gap filling strategies + - Cross-provider validation + +3. 
**Data Enrichment** + - Calculate derived metrics (returns, volatility) + - Add technical indicators + - Market session classification + +**Library Enhancements Needed**: + +```typescript +// libs/data-frame (ENHANCE EXISTING) +class MarketDataFrame { + // Add time-series specific operations + fillGaps(strategy: GapFillStrategy): MarketDataFrame; + detectOutliers(method: OutlierMethod): OutlierReport; + normalize(): MarketDataFrame; + calculateReturns(period: number): MarketDataFrame; +} + +// libs/data-quality (NEW LIBRARY) +interface QualityMetrics { + completeness: number; + accuracy: number; + timeliness: number; + consistency: number; + issues: QualityIssue[]; +} +``` + +### 2.2 Technical Indicators Library + +**Enhance**: `libs/strategy-engine` or create `libs/technical-indicators` + +**Initial Indicators**: +- Moving averages (SMA, EMA, VWAP) +- Momentum (RSI, MACD, Stochastic) +- Volatility (Bollinger Bands, ATR) +- Volume (OBV, Volume Profile) + +```typescript +// Implementation approach +interface TechnicalIndicator { + name: string; + calculate(data: OHLCV[]): T[]; + getSignal(current: T, previous: T[]): Signal; +} +``` + +--- + +## 🧠 Phase 3: Analytics & Strategy Layer + +### 3.1 Strategy Engine Enhancement + +**Current**: Basic structure exists in `libs/strategy-engine` + +**Enhancements Needed**: + +1. **Strategy Framework** + ```typescript + abstract class TradingStrategy { + abstract analyze(data: MarketData): StrategySignal[]; + abstract getRiskParams(): RiskParameters; + backtest(historicalData: MarketData[]): BacktestResults; + } + ``` + +2. **Signal Generation** + - Entry/exit signals + - Position sizing recommendations + - Risk-adjusted scores + +3. 
**Strategy Types to Implement**: + - Mean reversion + - Momentum/trend following + - Statistical arbitrage + - Volume-based strategies + +### 3.2 Backtesting Engine + +**New Service**: Enhanced `apps/strategy-service` + +**Features**: +- Historical simulation +- Performance metrics calculation +- Risk analysis +- Strategy comparison + +--- + +## ⚑ Phase 4: Execution Layer + +### 4.1 Portfolio Management + +**Enhance**: `apps/portfolio-service` + +**Core Features**: +- Position tracking +- Risk monitoring +- P&L calculation +- Margin management + +### 4.2 Order Management + +**New Service**: `apps/order-service` + +**Responsibilities**: +- Order validation +- Execution routing +- Fill reporting +- Trade reconciliation + +### 4.3 Risk Management + +**New Library**: `libs/risk-engine` + +**Risk Controls**: +- Position limits +- Drawdown limits +- Correlation limits +- Volatility scaling + +--- + +## πŸ“š Library Improvements Roadmap + +### Immediate (Phase 1-2) + +1. **`libs/http`** βœ… **Current Priority** + - [ ] Rate limiting middleware + - [ ] Circuit breaker pattern + - [ ] Request/response caching + - [ ] Retry strategies with exponential backoff + +2. **`libs/questdb-client`** + - [ ] Streaming insert optimization + - [ ] Batch insert operations + - [ ] Connection pooling + - [ ] Query result caching + +3. **`libs/logger`** βœ… **Recently Updated** + - [x] Migrated to `getLogger()` pattern + - [ ] Performance metrics logging + - [ ] Structured trading event logging + +4. **`libs/data-frame`** + - [ ] Time-series operations + - [ ] Financial calculations + - [ ] Memory optimization for large datasets + +### Medium Term (Phase 3) + +5. **`libs/cache`** + - [ ] Market data caching strategies + - [ ] Cache warming for frequently accessed symbols + - [ ] Distributed caching support + +6. **`libs/config`** + - [ ] Strategy-specific configurations + - [ ] Dynamic configuration updates + - [ ] Environment-specific overrides + +### Long Term (Phase 4+) + +7. 
**`libs/vector-engine`** + - [ ] Market similarity analysis + - [ ] Pattern recognition + - [ ] Correlation analysis + +--- + +## 🎯 Immediate Next Steps (Next 2 Weeks) + +### Week 1: Data Provider Hardening +1. **Enhance HTTP Client** (`libs/http`) + - Implement rate limiting + - Add circuit breaker pattern + - Add comprehensive error handling + +2. **Provider Reliability** (`apps/data-service`) + - Add health checks for all providers + - Implement fallback logic + - Add provider performance monitoring + +3. **Data Validation** + - Create `libs/data-validation` + - Implement basic price/volume validation + - Add data quality metrics + +### Week 2: Processing Foundation +1. **Start Processing Service** (`apps/processing-service`) + - Basic data cleaning pipeline + - Outlier detection + - Gap filling strategies + +2. **QuestDB Optimization** (`libs/questdb-client`) + - Implement streaming inserts + - Add batch operations + - Optimize for time-series data + +3. **Technical Indicators** + - Start `libs/technical-indicators` + - Implement basic indicators (SMA, EMA, RSI) + +--- + +## πŸ“Š Success Metrics + +### Phase 1 Completion Criteria +- [ ] 99.9% data provider uptime +- [ ] <500ms average data latency +- [ ] Zero data quality issues for major symbols +- [ ] All providers monitored and health-checked + +### Phase 2 Completion Criteria +- [ ] Automated data quality scoring +- [ ] Gap-free historical data for 100+ symbols +- [ ] Real-time technical indicator calculation +- [ ] Processing latency <100ms + +### Phase 3 Completion Criteria +- [ ] 5+ implemented trading strategies +- [ ] Comprehensive backtesting framework +- [ ] Performance analytics dashboard + +--- + +## 🚨 Risk Mitigation + +### Data Risks +- **Provider Failures**: Multi-provider fallback strategy +- **Data Quality**: Automated validation and alerting +- **Rate Limits**: Smart request distribution + +### Technical Risks +- **Scalability**: Horizontal scaling design +- **Latency**: Optimize critical 
paths early +- **Data Loss**: Comprehensive backup strategies + +### Operational Risks +- **Monitoring**: Full observability stack (Grafana, Loki, Prometheus) +- **Alerting**: Critical issue notifications +- **Documentation**: Keep architecture docs current + +--- + +## πŸ’‘ Innovation Opportunities + +### Machine Learning Integration +- Predictive models for data quality +- Anomaly detection in market data +- Strategy parameter optimization + +### Real-time Processing +- Stream processing with Kafka/Pulsar +- Event-driven architecture +- WebSocket data feeds + +### Advanced Analytics +- Market microstructure analysis +- Alternative data integration +- Cross-asset correlation analysis + +--- + +*This roadmap is a living document that will evolve as we learn and adapt. Focus remains on building solid foundations before adding complexity.* + +**Next Review**: End of June 2025 diff --git a/SETUP-COMPLETE.md b/SETUP-COMPLETE.md index 9f9b345..7a3f559 100644 --- a/SETUP-COMPLETE.md +++ b/SETUP-COMPLETE.md @@ -1,161 +1,161 @@ -# πŸš€ Trading Bot Docker Infrastructure Setup Complete! - -Your Docker infrastructure has been successfully configured. 
Here's what you have: - -## πŸ“¦ What's Included - -### Core Services -- **πŸ‰ Dragonfly**: Redis-compatible cache and event streaming (Port 6379) -- **🐘 PostgreSQL**: Operational database with complete trading schema (Port 5432) -- **πŸ“Š QuestDB**: Time-series database for market data (Ports 9000, 8812, 9009) -- **πŸƒ MongoDB**: Document storage for sentiment analysis and raw documents (Port 27017) - -### Admin Tools -- **πŸ”§ Redis Insight**: Dragonfly management GUI (Port 8001) -- **πŸ› οΈ PgAdmin**: PostgreSQL administration (Port 8080) -- **πŸƒ Mongo Express**: MongoDB document browser (Port 8081) - -### Monitoring (Optional) -- **πŸ“ˆ Prometheus**: Metrics collection (Port 9090) -- **πŸ“Š Grafana**: Dashboards and alerting (Port 3000) - -## 🏁 Getting Started - -### Step 1: Start Docker Desktop -Make sure Docker Desktop is running on your Windows machine. - -### Step 2: Start Infrastructure -```powershell -# Quick start - core services only -npm run infra:up - -# Or with management script -npm run docker:start - -# Full development environment -npm run dev:full -``` - -### Step 3: Access Admin Interfaces -```powershell -# Start admin tools -npm run docker:admin -``` - -## πŸ”— Access URLs - -Once running, access these services: - -| Service | URL | Login | -|---------|-----|-------| -| **QuestDB Console** | http://localhost:9000 | No login required | -| **Redis Insight** | http://localhost:8001 | No login required | -| **Bull Board** | http://localhost:3001 | No login required | -| **PgAdmin** | http://localhost:8080 | `admin@tradingbot.local` / `admin123` | -| **Mongo Express** | http://localhost:8081 | `admin` / `admin123` | -| **Prometheus** | http://localhost:9090 | No login required | -| **Grafana** | http://localhost:3000 | `admin` / `admin123` | -| **Bull Board** | http://localhost:3001 | No login required | - -## πŸ“Š Database Connections - -### From Your Trading Services -Update your `.env` file: -```env -# Dragonfly (Redis replacement) 
-DRAGONFLY_HOST=localhost -DRAGONFLY_PORT=6379 - -# PostgreSQL -POSTGRES_HOST=localhost -POSTGRES_PORT=5432 -POSTGRES_DB=trading_bot -POSTGRES_USER=trading_user -POSTGRES_PASSWORD=trading_pass_dev - -# QuestDB -QUESTDB_HOST=localhost -QUESTDB_PORT=8812 -QUESTDB_DB=qdb - -# MongoDB -MONGODB_HOST=localhost -MONGODB_PORT=27017 -MONGODB_DB=trading_documents -MONGODB_USER=trading_admin -MONGODB_PASSWORD=trading_mongo_dev -``` - -### Database Schema -PostgreSQL includes these pre-configured schemas: -- `trading.*` - Orders, positions, executions, accounts -- `strategy.*` - Strategies, signals, performance metrics -- `risk.*` - Risk limits, events, monitoring -- `audit.*` - System events, health checks, configuration - -## πŸ› οΈ Management Commands - -```powershell -# Basic operations -npm run docker:start # Start core services -npm run docker:stop # Stop all services -npm run docker:status # Check service status -npm run docker:logs # View all logs -npm run docker:reset # Reset all data (destructive!) - -# Additional services -npm run docker:admin # Start admin interfaces -npm run docker:monitoring # Start Prometheus & Grafana - -# Development workflows -npm run dev:full # Infrastructure + admin + your services -npm run dev:clean # Reset + restart everything - -# Direct PowerShell script access -./scripts/docker.ps1 start -./scripts/docker.ps1 logs -Service dragonfly -./scripts/docker.ps1 help -``` - -## βœ… Next Steps - -1. **Start Docker Desktop** if not already running -2. **Run**: `npm run docker:start` to start core infrastructure -3. **Run**: `npm run docker:admin` to start admin tools -4. **Update** your environment variables to use the Docker services -5. **Test** Dragonfly connection in your EventPublisher service -6. **Verify** database schema in PgAdmin -7. **Start** your trading services with the new infrastructure - -## 🎯 Ready for Integration - -Your EventPublisher service is already configured to use Dragonfly. 
The infrastructure supports: - -- βœ… **Event Streaming**: Dragonfly handles Redis Streams for real-time events -- βœ… **Caching**: High-performance caching with better memory efficiency -- βœ… **Operational Data**: PostgreSQL with complete trading schemas -- βœ… **Time-Series Data**: QuestDB for market data and analytics -- βœ… **Monitoring**: Full observability stack ready -- βœ… **Admin Tools**: Web-based management interfaces - -The system is designed to scale from development to production with the same Docker configuration. - -## πŸ”§ Troubleshooting - -If you encounter issues: -```powershell -# Check Docker status -docker --version -docker-compose --version - -# Verify services -npm run docker:status - -# View specific service logs -./scripts/docker.ps1 logs -Service dragonfly - -# Reset if needed -npm run docker:reset -``` - -**Happy Trading! πŸš€πŸ“ˆ** +# πŸš€ Trading Bot Docker Infrastructure Setup Complete! + +Your Docker infrastructure has been successfully configured. Here's what you have: + +## πŸ“¦ What's Included + +### Core Services +- **πŸ‰ Dragonfly**: Redis-compatible cache and event streaming (Port 6379) +- **🐘 PostgreSQL**: Operational database with complete trading schema (Port 5432) +- **πŸ“Š QuestDB**: Time-series database for market data (Ports 9000, 8812, 9009) +- **πŸƒ MongoDB**: Document storage for sentiment analysis and raw documents (Port 27017) + +### Admin Tools +- **πŸ”§ Redis Insight**: Dragonfly management GUI (Port 8001) +- **πŸ› οΈ PgAdmin**: PostgreSQL administration (Port 8080) +- **πŸƒ Mongo Express**: MongoDB document browser (Port 8081) + +### Monitoring (Optional) +- **πŸ“ˆ Prometheus**: Metrics collection (Port 9090) +- **πŸ“Š Grafana**: Dashboards and alerting (Port 3000) + +## 🏁 Getting Started + +### Step 1: Start Docker Desktop +Make sure Docker Desktop is running on your Windows machine. 
+ +### Step 2: Start Infrastructure +```powershell +# Quick start - core services only +npm run infra:up + +# Or with management script +npm run docker:start + +# Full development environment +npm run dev:full +``` + +### Step 3: Access Admin Interfaces +```powershell +# Start admin tools +npm run docker:admin +``` + +## πŸ”— Access URLs + +Once running, access these services: + +| Service | URL | Login | +|---------|-----|-------| +| **QuestDB Console** | http://localhost:9000 | No login required | +| **Redis Insight** | http://localhost:8001 | No login required | +| **Bull Board** | http://localhost:3001 | No login required | +| **PgAdmin** | http://localhost:8080 | `admin@tradingbot.local` / `admin123` | +| **Mongo Express** | http://localhost:8081 | `admin` / `admin123` | +| **Prometheus** | http://localhost:9090 | No login required | +| **Grafana** | http://localhost:3000 | `admin` / `admin123` | + +## πŸ“Š Database Connections + +### From Your Trading Services +Update your `.env` file: +```env +# Dragonfly (Redis replacement) +DRAGONFLY_HOST=localhost +DRAGONFLY_PORT=6379 + +# PostgreSQL +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 +POSTGRES_DB=trading_bot +POSTGRES_USER=trading_user +POSTGRES_PASSWORD=trading_pass_dev + +# QuestDB +QUESTDB_HOST=localhost +QUESTDB_PORT=8812 +QUESTDB_DB=qdb + +# MongoDB +MONGODB_HOST=localhost +MONGODB_PORT=27017 +MONGODB_DB=trading_documents +MONGODB_USER=trading_admin +MONGODB_PASSWORD=trading_mongo_dev +``` + +### Database Schema +PostgreSQL includes these pre-configured schemas: +- `trading.*` - Orders, positions, executions, accounts +- `strategy.*` - Strategies, signals, performance metrics +- `risk.*` - Risk limits, events, monitoring +- `audit.*` - System events, health checks, configuration + +## πŸ› οΈ Management Commands + +```powershell +# Basic operations +npm run docker:start # Start core services +npm run docker:stop # Stop all services +npm run 
docker:status # Check service status +npm run docker:logs # View all logs +npm run docker:reset # Reset all data (destructive!) + +# Additional services +npm run docker:admin # Start admin interfaces +npm run docker:monitoring # Start Prometheus & Grafana + +# Development workflows +npm run dev:full # Infrastructure + admin + your services +npm run dev:clean # Reset + restart everything + +# Direct PowerShell script access +./scripts/docker.ps1 start +./scripts/docker.ps1 logs -Service dragonfly +./scripts/docker.ps1 help +``` + +## βœ… Next Steps + +1. **Start Docker Desktop** if not already running +2. **Run**: `npm run docker:start` to start core infrastructure +3. **Run**: `npm run docker:admin` to start admin tools +4. **Update** your environment variables to use the Docker services +5. **Test** Dragonfly connection in your EventPublisher service +6. **Verify** database schema in PgAdmin +7. **Start** your trading services with the new infrastructure + +## 🎯 Ready for Integration + +Your EventPublisher service is already configured to use Dragonfly. The infrastructure supports: + +- βœ… **Event Streaming**: Dragonfly handles Redis Streams for real-time events +- βœ… **Caching**: High-performance caching with better memory efficiency +- βœ… **Operational Data**: PostgreSQL with complete trading schemas +- βœ… **Time-Series Data**: QuestDB for market data and analytics +- βœ… **Monitoring**: Full observability stack ready +- βœ… **Admin Tools**: Web-based management interfaces + +The system is designed to scale from development to production with the same Docker configuration. + +## πŸ”§ Troubleshooting + +If you encounter issues: +```powershell +# Check Docker status +docker --version +docker-compose --version + +# Verify services +npm run docker:status + +# View specific service logs +./scripts/docker.ps1 logs -Service dragonfly + +# Reset if needed +npm run docker:reset +``` + +**Happy Trading! 
πŸš€πŸ“ˆ** diff --git a/SIMPLIFIED-ARCHITECTURE.md b/SIMPLIFIED-ARCHITECTURE.md index bfaf979..dccaa23 100644 --- a/SIMPLIFIED-ARCHITECTURE.md +++ b/SIMPLIFIED-ARCHITECTURE.md @@ -1,825 +1,825 @@ -# Stock Bot - System Architecture - -> **Updated**: June 2025 - -## Overview - -TypeScript microservices architecture for automated stock trading with real-time data processing and multi-database storage. - -## Core Services - -``` -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Data Service β”‚ β”‚Processing Serviceβ”‚ β”‚Strategy Service β”‚ -β”‚ β€’ Market Data │────▢│ β€’ Indicators │────▢│ β€’ Strategies β”‚ -β”‚ β€’ Providers β”‚ β”‚ β€’ Analytics β”‚ β”‚ β€’ Backtesting β”‚ -β”‚ β€’ QuestDB β”‚ β”‚ β€’ Validation β”‚ β”‚ β€’ Signal Gen β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ - β”‚ β”‚ β”‚ - β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ - └──────────────▢│ Event Bus β”‚β—€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ - β”‚ (Dragonfly) β”‚ - β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ - β”‚ -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚Execution Serviceβ”‚ β”‚Portfolio Serviceβ”‚ β”‚ Dashboard β”‚ -β”‚ β€’ Order Mgmt β”‚ β”‚ β€’ Positions β”‚ β”‚ β€’ Angular UI β”‚ -β”‚ β€’ Risk Control β”‚ β”‚ β€’ Risk Mgmt β”‚ β”‚ β€’ Real-time β”‚ -β”‚ β€’ Execution β”‚ β”‚ β€’ Performance β”‚ β”‚ β€’ Analytics β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ -``` - -## Services Structure - -``` -stock-bot/ -β”œβ”€β”€ apps/ 
-β”‚ β”œβ”€β”€ data-service/ # Market data ingestion & storage -β”‚ β”œβ”€β”€ execution-service/ # Order execution & broker integration -β”‚ β”œβ”€β”€ portfolio-service/ # Position & risk management -β”‚ β”œβ”€β”€ processing-service/ # Data processing & indicators -β”‚ β”œβ”€β”€ strategy-service/ # Trading strategies & backtesting -β”‚ └── dashboard/ # Angular UI (port 4200) -β”‚ -β”œβ”€β”€ libs/ # Shared libraries -β”‚ β”œβ”€β”€ logger/ # Centralized logging w/ Loki -β”‚ β”œβ”€β”€ config/ # Configuration management -β”‚ β”œβ”€β”€ event-bus/ # Event system -β”‚ β”œβ”€β”€ mongodb-client/ # MongoDB operations -β”‚ β”œβ”€β”€ postgres-client/ # PostgreSQL operations -β”‚ β”œβ”€β”€ questdb-client/ # Time-series data -β”‚ β”œβ”€β”€ http/ # HTTP client w/ proxy support -β”‚ β”œβ”€β”€ cache/ # Caching layer -β”‚ └── utils/ # Common utilities -β”‚ -└── database/ # Database configurations - β”œβ”€β”€ mongodb/init/ - └── postgres/init/ -``` - -## Technology Stack - -| Component | Technology | Purpose | -|-----------|------------|---------| -| **Runtime** | Bun | Fast JavaScript runtime | -| **Language** | TypeScript | Type-safe development | -| **Databases** | PostgreSQL, MongoDB, QuestDB | Multi-database architecture | -| **Caching** | Dragonfly (Redis) | Event bus & caching | -| **Frontend** | Angular 18 | Modern reactive UI | -| **Monitoring** | Prometheus, Grafana, Loki | Observability stack | - -## Quick Start - -```bash -# Install dependencies -bun install - -# Start infrastructure -bun run infra:up - -# Start services -bun run dev - -# Access dashboard -# http://localhost:4200 -``` - -## Key Features - -- **Real-time Trading**: Live market data & order execution -- **Multi-Database**: PostgreSQL, MongoDB, QuestDB for different data types -- **Event-Driven**: Asynchronous communication via Dragonfly -- **Monitoring**: Full observability with metrics, logs, and tracing -- **Modular**: Shared libraries for common functionality -- **Type-Safe**: Full TypeScript coverage 
-β”‚ β”œβ”€β”€ processing-service/ # Combined processing & indicators -β”‚ β”‚ β”œβ”€β”€ src/ -β”‚ β”‚ β”‚ β”œβ”€β”€ indicators/ # Technical indicators (uses @stock-bot/utils) -β”‚ β”‚ β”‚ β”œβ”€β”€ processors/ # Data processing pipeline -β”‚ β”‚ β”‚ β”œβ”€β”€ vectorized/ # Vectorized calculations -β”‚ β”‚ β”‚ β”œβ”€β”€ services/ -β”‚ β”‚ β”‚ └── index.ts -β”‚ β”‚ └── package.json -β”‚ β”‚ -β”‚ β”œβ”€β”€ strategy-service/ # Combined strategy & backtesting -β”‚ β”‚ β”œβ”€β”€ src/ -β”‚ β”‚ β”‚ β”œβ”€β”€ strategies/ # Strategy implementations -β”‚ β”‚ β”‚ β”œβ”€β”€ backtesting/ # Multi-mode backtesting engine -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ modes/ # Backtesting modes -β”‚ β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ live-mode.ts # Live trading mode -β”‚ β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ event-mode.ts # Event-driven backtest -β”‚ β”‚ β”‚ β”‚ β”‚ └── vector-mode.ts # Vectorized backtest -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ engines/ # Execution engines -β”‚ β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ event-engine.ts # Event-based simulation -β”‚ β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ vector-engine.ts # Vectorized calculations -β”‚ β”‚ β”‚ β”‚ β”‚ └── hybrid-engine.ts # Combined validation -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ simulator.ts # Market simulator -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ runner.ts # Backtest orchestrator -β”‚ β”‚ β”‚ β”‚ └── metrics.ts # Performance analysis -β”‚ β”‚ β”‚ β”œβ”€β”€ live/ # Live strategy execution -β”‚ β”‚ β”‚ β”œβ”€β”€ framework/ # Strategy framework -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ base-strategy.ts -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ execution-mode.ts -β”‚ β”‚ β”‚ β”‚ └── mode-factory.ts -β”‚ β”‚ β”‚ └── index.ts -β”‚ β”‚ └── package.json -β”‚ β”‚ -β”‚ β”œβ”€β”€ execution-service/ # Combined order execution & simulation -β”‚ β”‚ β”œβ”€β”€ src/ -β”‚ β”‚ β”‚ β”œβ”€β”€ brokers/ # Live broker adapters -β”‚ β”‚ β”‚ β”œβ”€β”€ simulation/ # Simulated execution -β”‚ β”‚ β”‚ β”œβ”€β”€ unified/ # Unified execution interface -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ executor.ts # Abstract executor -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ live-executor.ts -β”‚ β”‚ β”‚ β”‚ 
β”œβ”€β”€ sim-executor.ts -β”‚ β”‚ β”‚ β”‚ └── vector-executor.ts -β”‚ β”‚ β”‚ └── index.ts -β”‚ β”‚ └── package.json -β”‚ β”‚ -β”‚ β”œβ”€β”€ portfolio-service/ # Combined portfolio & risk management -β”‚ β”‚ β”œβ”€β”€ src/ -β”‚ β”‚ β”‚ β”œβ”€β”€ portfolio/ # Portfolio tracking -β”‚ β”‚ β”‚ β”œβ”€β”€ risk/ # Risk management (uses @stock-bot/utils) -β”‚ β”‚ β”‚ β”œβ”€β”€ positions/ # Position management -β”‚ β”‚ β”‚ β”œβ”€β”€ performance/ # Performance tracking -β”‚ β”‚ β”‚ └── index.ts -β”‚ β”‚ └── package.json -β”‚ β”‚ -β”‚ └── dashboard/ # Combined API & reporting -β”‚ β”œβ”€β”€ src/ -β”‚ β”‚ β”œβ”€β”€ api/ # REST API -β”‚ β”‚ β”œβ”€β”€ web/ # Web interface (Angular) -β”‚ β”‚ β”œβ”€β”€ reports/ # Report generation -β”‚ β”‚ β”œβ”€β”€ websockets/ # Real-time updates -β”‚ β”‚ └── index.ts -β”‚ └── package.json -β”‚ -β”œβ”€β”€ libs/ # βœ… Your existing shared libraries -β”‚ β”œβ”€β”€ config/ # βœ… Environment configuration -β”‚ β”œβ”€β”€ http/ # βœ… HTTP utilities -β”‚ β”œβ”€β”€ logger/ # βœ… Loki-integrated logging -β”‚ β”œβ”€β”€ mongodb-client/ # βœ… MongoDB operations -β”‚ β”œβ”€β”€ postgres-client/ # βœ… PostgreSQL operations -β”‚ β”œβ”€β”€ questdb-client/ # βœ… Time-series data -β”‚ β”œβ”€β”€ types/ # βœ… Shared TypeScript types -β”‚ β”œβ”€β”€ utils/ # βœ… Calculations & utilities -β”‚ β”œβ”€β”€ event-bus/ # πŸ†• Dragonfly event system -β”‚ β”œβ”€β”€ strategy-engine/ # πŸ†• Strategy framework -β”‚ β”œβ”€β”€ vector-engine/ # πŸ†• Vectorized calculations -β”‚ └── data-frame/ # πŸ†• DataFrame operations -``` - -## Multi-Mode Backtesting Architecture - -### 1. 
Execution Mode Framework - -```typescript -export abstract class ExecutionMode { - protected logger = createLogger(this.constructor.name); - protected config = new ServiceConfig(); - - abstract name: string; - abstract executeOrder(order: Order): Promise; - abstract getCurrentTime(): Date; - abstract getMarketData(symbol: string): Promise; - abstract publishEvent(event: string, data: any): Promise; -} - -export enum BacktestMode { - LIVE = 'live', - EVENT_DRIVEN = 'event-driven', - VECTORIZED = 'vectorized', - HYBRID = 'hybrid' -} -``` - -### 2. Live Trading Mode - -```typescript -export class LiveMode extends ExecutionMode { - name = 'live'; - private broker = new BrokerClient(this.config.getBrokerConfig()); - private eventBus = new EventBus(); - - async executeOrder(order: Order): Promise { - this.logger.info('Executing live order', { orderId: order.id }); - - // Execute via real broker - const result = await this.broker.placeOrder(order); - - // Publish to event bus - await this.eventBus.publish('order.executed', result); - - return result; - } - - getCurrentTime(): Date { - return new Date(); // Real time - } - - async getMarketData(symbol: string): Promise { - // Get live market data - return await this.marketDataService.getLiveData(symbol); - } - - async publishEvent(event: string, data: any): Promise { - await this.eventBus.publish(event, data); - } -} -``` - -### 3. 
Event-Driven Backtesting Mode - -```typescript -export class EventBacktestMode extends ExecutionMode { - name = 'event-driven'; - private simulator = new MarketSimulator(); - private eventBus = new InMemoryEventBus(); // In-memory for simulation - private simulationTime: Date; - private historicalData: Map; - - constructor(private config: BacktestConfig) { - super(); - this.simulationTime = config.startDate; - } - - async executeOrder(order: Order): Promise { - this.logger.debug('Simulating order execution', { - orderId: order.id, - simulationTime: this.simulationTime - }); - - // Realistic order simulation with slippage, fees - const result = await this.simulator.executeOrder(order, { - currentTime: this.simulationTime, - marketData: await this.getMarketData(order.symbol), - slippageModel: this.config.slippageModel, - commissionModel: this.config.commissionModel - }); - - // Publish to simulation event bus - await this.eventBus.publish('order.executed', result); - - return result; - } - - getCurrentTime(): Date { - return this.simulationTime; - } - - async getMarketData(symbol: string): Promise { - const data = this.historicalData.get(symbol) || []; - return data.find(d => d.timestamp <= this.simulationTime) || null; - } - - async publishEvent(event: string, data: any): Promise { - await this.eventBus.publish(event, data); - } - - // Progress simulation time - advanceTime(newTime: Date): void { - this.simulationTime = newTime; - } -} -``` - -### 4. 
Vectorized Backtesting Mode - -```typescript -export class VectorBacktestMode extends ExecutionMode { - name = 'vectorized'; - private dataFrame: DataFrame; - private currentIndex: number = 0; - - constructor(private config: VectorBacktestConfig) { - super(); - this.dataFrame = new DataFrame(config.historicalData); - } - - // Vectorized execution - processes entire dataset at once - async executeVectorizedBacktest(strategy: VectorizedStrategy): Promise { - const startTime = Date.now(); - - this.logger.info('Starting vectorized backtest', { - strategy: strategy.name, - dataPoints: this.dataFrame.length - }); - - // Generate all signals at once using your utils library - const signals = this.generateVectorizedSignals(strategy); - - // Calculate performance metrics vectorized - const performance = this.calculateVectorizedPerformance(signals); - - // Apply trading costs if specified - if (this.config.tradingCosts) { - this.applyTradingCosts(performance, signals); - } - - const executionTime = Date.now() - startTime; - - this.logger.info('Vectorized backtest completed', { - executionTime, - totalReturn: performance.totalReturn, - sharpeRatio: performance.sharpeRatio - }); - - return { - mode: 'vectorized', - strategy: strategy.name, - performance, - executionTime, - signals - }; - } - - private generateVectorizedSignals(strategy: VectorizedStrategy): DataFrame { - const prices = this.dataFrame.get('close'); - - // Use your existing technical indicators from @stock-bot/utils - const indicators = { - sma20: sma(prices, 20), - sma50: sma(prices, 50), - rsi: rsi(prices, 14), - macd: macd(prices) - }; - - // Generate position signals vectorized - const positions = strategy.generatePositions(this.dataFrame, indicators); - - return new DataFrame({ - ...this.dataFrame.toObject(), - ...indicators, - positions - }); - } - - private calculateVectorizedPerformance(signals: DataFrame): PerformanceMetrics { - const prices = signals.get('close'); - const positions = 
signals.get('positions'); - - // Calculate returns vectorized - const returns = prices.slice(1).map((price, i) => - (price - prices[i]) / prices[i] - ); - - // Strategy returns = position[t-1] * market_return[t] - const strategyReturns = returns.map((ret, i) => - (positions[i] || 0) * ret - ); - - // Use your existing performance calculation utilities - return { - totalReturn: calculateTotalReturn(strategyReturns), - sharpeRatio: calculateSharpeRatio(strategyReturns), - maxDrawdown: calculateMaxDrawdown(strategyReturns), - volatility: calculateVolatility(strategyReturns), - winRate: calculateWinRate(strategyReturns) - }; - } - - // Standard interface methods (not used in vectorized mode) - async executeOrder(order: Order): Promise { - throw new Error('Use executeVectorizedBacktest for vectorized mode'); - } - - getCurrentTime(): Date { - return this.dataFrame.getTimestamp(this.currentIndex); - } - - async getMarketData(symbol: string): Promise { - return this.dataFrame.getRow(this.currentIndex); - } - - async publishEvent(event: string, data: any): Promise { - // No-op for vectorized mode - } -} -``` - -### 5. 
Hybrid Validation Mode - -```typescript -export class HybridBacktestMode extends ExecutionMode { - name = 'hybrid'; - private eventMode: EventBacktestMode; - private vectorMode: VectorBacktestMode; - - constructor(config: BacktestConfig) { - super(); - this.eventMode = new EventBacktestMode(config); - this.vectorMode = new VectorBacktestMode(config); - } - - async validateStrategy( - strategy: BaseStrategy, - tolerance: number = 0.001 - ): Promise { - - this.logger.info('Starting hybrid validation', { - strategy: strategy.name, - tolerance - }); - - // Run vectorized backtest (fast) - const vectorResult = await this.vectorMode.executeVectorizedBacktest( - strategy as VectorizedStrategy - ); - - // Run event-driven backtest (realistic) - const eventResult = await this.runEventBacktest(strategy); - - // Compare results - const performanceDiff = Math.abs( - vectorResult.performance.totalReturn - - eventResult.performance.totalReturn - ); - - const isValid = performanceDiff < tolerance; - - this.logger.info('Hybrid validation completed', { - isValid, - performanceDifference: performanceDiff, - recommendation: isValid ? 'vectorized' : 'event-driven' - }); - - return { - isValid, - performanceDifference: performanceDiff, - vectorizedResult: vectorResult, - eventResult, - recommendation: isValid ? 
- 'Vectorized results are reliable for this strategy' : - 'Use event-driven backtesting for accurate results' - }; - } - - // Standard interface methods delegate to event mode - async executeOrder(order: Order): Promise { - return await this.eventMode.executeOrder(order); - } - - getCurrentTime(): Date { - return this.eventMode.getCurrentTime(); - } - - async getMarketData(symbol: string): Promise { - return await this.eventMode.getMarketData(symbol); - } - - async publishEvent(event: string, data: any): Promise { - await this.eventMode.publishEvent(event, data); - } -} -``` - -## Unified Strategy Implementation - -### Base Strategy Framework - -```typescript -export abstract class BaseStrategy { - protected mode: ExecutionMode; - protected logger = createLogger(this.constructor.name); - - abstract name: string; - abstract parameters: Record; - - constructor(mode: ExecutionMode) { - this.mode = mode; - } - - // Works identically across all modes - abstract onPriceUpdate(data: PriceData): Promise; - abstract onIndicatorUpdate(data: IndicatorData): Promise; - - protected async emitSignal(signal: TradeSignal): Promise { - this.logger.debug('Emitting trade signal', { signal }); - - // Mode handles whether this is live, simulated, or vectorized - const order = this.createOrder(signal); - const result = await this.mode.executeOrder(order); - - await this.mode.publishEvent('trade.executed', { - signal, - order, - result, - timestamp: this.mode.getCurrentTime() - }); - } - - private createOrder(signal: TradeSignal): Order { - return { - id: generateId(), - symbol: signal.symbol, - side: signal.action, - quantity: signal.quantity, - type: 'market', - timestamp: this.mode.getCurrentTime() - }; - } -} - -// Vectorized strategy interface -export interface VectorizedStrategy { - name: string; - parameters: Record; - generatePositions(data: DataFrame, indicators: any): number[]; -} -``` - -### Example Strategy Implementation - -```typescript -export class SMAStrategy extends 
BaseStrategy implements VectorizedStrategy { - name = 'SMA-Crossover'; - parameters = { fastPeriod: 10, slowPeriod: 20 }; - - private fastSMA: number[] = []; - private slowSMA: number[] = []; - - async onPriceUpdate(data: PriceData): Promise { - // Same logic for live, event-driven, and hybrid modes - this.fastSMA.push(data.close); - this.slowSMA.push(data.close); - - if (this.fastSMA.length > this.parameters.fastPeriod) { - this.fastSMA.shift(); - } - if (this.slowSMA.length > this.parameters.slowPeriod) { - this.slowSMA.shift(); - } - - if (this.fastSMA.length === this.parameters.fastPeriod && - this.slowSMA.length === this.parameters.slowPeriod) { - - const fastAvg = sma(this.fastSMA, this.parameters.fastPeriod)[0]; - const slowAvg = sma(this.slowSMA, this.parameters.slowPeriod)[0]; - - if (fastAvg > slowAvg) { - await this.emitSignal({ - symbol: data.symbol, - action: 'BUY', - quantity: 100, - confidence: 0.8 - }); - } else if (fastAvg < slowAvg) { - await this.emitSignal({ - symbol: data.symbol, - action: 'SELL', - quantity: 100, - confidence: 0.8 - }); - } - } - } - - async onIndicatorUpdate(data: IndicatorData): Promise { - // Handle pre-calculated indicators - } - - // Vectorized implementation for fast backtesting - generatePositions(data: DataFrame, indicators: any): number[] { - const { sma20: fastSMA, sma50: slowSMA } = indicators; - - return fastSMA.map((fast, i) => { - const slow = slowSMA[i]; - if (isNaN(fast) || isNaN(slow)) return 0; - - // Long when fast > slow, short when fast < slow - return fast > slow ? 1 : (fast < slow ? 
-1 : 0); - }); - } -} -``` - -## Mode Factory and Service Integration - -### Mode Factory - -```typescript -export class ModeFactory { - static create(mode: BacktestMode, config: any): ExecutionMode { - switch (mode) { - case BacktestMode.LIVE: - return new LiveMode(); - case BacktestMode.EVENT_DRIVEN: - return new EventBacktestMode(config); - case BacktestMode.VECTORIZED: - return new VectorBacktestMode(config); - case BacktestMode.HYBRID: - return new HybridBacktestMode(config); - default: - throw new Error(`Unknown mode: ${mode}`); - } - } -} -``` - -### Strategy Service Integration - -```typescript -export class StrategyService { - private logger = createLogger('strategy-service'); - - async runStrategy( - strategyName: string, - mode: BacktestMode, - config: any - ): Promise { - - const executionMode = ModeFactory.create(mode, config); - const strategy = await this.loadStrategy(strategyName, executionMode); - - this.logger.info('Starting strategy execution', { - strategy: strategyName, - mode, - config - }); - - switch (mode) { - case BacktestMode.LIVE: - return await this.runLiveStrategy(strategy); - - case BacktestMode.EVENT_DRIVEN: - return await this.runEventBacktest(strategy, config); - - case BacktestMode.VECTORIZED: - return await (executionMode as VectorBacktestMode) - .executeVectorizedBacktest(strategy as VectorizedStrategy); - - case BacktestMode.HYBRID: - return await (executionMode as HybridBacktestMode) - .validateStrategy(strategy, config.tolerance); - - default: - throw new Error(`Unsupported mode: ${mode}`); - } - } - - async optimizeStrategy( - strategyName: string, - parameterGrid: Record, - config: BacktestConfig - ): Promise { - - const results: OptimizationResult[] = []; - const combinations = this.generateParameterCombinations(parameterGrid); - - this.logger.info('Starting parameter optimization', { - strategy: strategyName, - combinations: combinations.length - }); - - // Use vectorized mode for fast parameter optimization - const 
vectorMode = new VectorBacktestMode(config); - - // Can be parallelized - await Promise.all( - combinations.map(async (params) => { - const strategy = await this.loadStrategy(strategyName, vectorMode, params); - const result = await vectorMode.executeVectorizedBacktest( - strategy as VectorizedStrategy - ); - - results.push({ - parameters: params, - performance: result.performance, - executionTime: result.executionTime - }); - }) - ); - - // Sort by Sharpe ratio - return results.sort((a, b) => - b.performance.sharpeRatio - a.performance.sharpeRatio - ); - } -} -``` - -## Service Configuration - -### Environment-Based Mode Selection - -```typescript -export class ServiceConfig { - getTradingConfig(): TradingConfig { - return { - mode: (process.env.TRADING_MODE as BacktestMode) || BacktestMode.LIVE, - brokerConfig: { - apiKey: process.env.BROKER_API_KEY, - sandbox: process.env.BROKER_SANDBOX === 'true' - }, - backtestConfig: { - startDate: new Date(process.env.BACKTEST_START_DATE || '2023-01-01'), - endDate: new Date(process.env.BACKTEST_END_DATE || '2024-01-01'), - initialCapital: parseFloat(process.env.INITIAL_CAPITAL || '100000'), - slippageModel: process.env.SLIPPAGE_MODEL || 'linear', - commissionModel: process.env.COMMISSION_MODEL || 'fixed' - } - }; - } -} -``` - -### CLI Interface - -```typescript -// CLI for running different modes -import { Command } from 'commander'; - -const program = new Command(); - -program - .name('stock-bot') - .description('Stock Trading Bot with Multi-Mode Backtesting'); - -program - .command('live') - .description('Run live trading') - .option('-s, --strategy ', 'Strategy to run') - .action(async (options) => { - const strategyService = new StrategyService(); - await strategyService.runStrategy( - options.strategy, - BacktestMode.LIVE, - {} - ); - }); - -program - .command('backtest') - .description('Run backtesting') - .option('-s, --strategy ', 'Strategy to test') - .option('-m, --mode ', 'Backtest mode (event|vector|hybrid)', 
'event') - .option('-f, --from ', 'Start date') - .option('-t, --to ', 'End date') - .action(async (options) => { - const strategyService = new StrategyService(); - await strategyService.runStrategy( - options.strategy, - options.mode as BacktestMode, - { - startDate: new Date(options.from), - endDate: new Date(options.to) - } - ); - }); - -program - .command('optimize') - .description('Optimize strategy parameters') - .option('-s, --strategy ', 'Strategy to optimize') - .option('-p, --params ', 'Parameter grid JSON') - .action(async (options) => { - const strategyService = new StrategyService(); - const paramGrid = JSON.parse(options.params); - await strategyService.optimizeStrategy( - options.strategy, - paramGrid, - {} - ); - }); - -program.parse(); -``` - -## Performance Comparison - -### Execution Speed by Mode - -| Mode | Data Points/Second | Memory Usage | Use Case | -|------|-------------------|--------------|----------| -| **Live** | Real-time | Low | Production trading | -| **Event-Driven** | ~1,000 | Medium | Realistic validation | -| **Vectorized** | ~100,000+ | High | Parameter optimization | -| **Hybrid** | Combined | Medium | Strategy validation | - -### When to Use Each Mode - -- **Live Mode**: Production trading with real money -- **Event-Driven**: Final strategy validation, complex order logic -- **Vectorized**: Initial development, parameter optimization, quick testing -- **Hybrid**: Validating vectorized results against realistic simulation - -## Integration with Your Existing Libraries - -This architecture leverages all your existing infrastructure: - -- **@stock-bot/config**: Environment management -- **@stock-bot/logger**: Comprehensive logging with Loki -- **@stock-bot/utils**: All technical indicators and calculations -- **@stock-bot/questdb-client**: Time-series data storage -- **@stock-bot/postgres-client**: Transactional data -- **@stock-bot/mongodb-client**: Configuration storage - -## Key Benefits - -1. 
**Unified Codebase**: Same strategy logic across all modes -2. **Performance Flexibility**: Choose speed vs accuracy based on needs -3. **Validation Pipeline**: Hybrid mode ensures vectorized results are accurate -4. **Production Ready**: Live mode for actual trading -5. **Development Friendly**: Fast iteration with vectorized backtesting - -This simplified architecture reduces complexity while providing comprehensive backtesting capabilities that scale from rapid prototyping to production trading. +# Stock Bot - System Architecture + +> **Updated**: June 2025 + +## Overview + +TypeScript microservices architecture for automated stock trading with real-time data processing and multi-database storage. + +## Core Services + +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Data Service β”‚ β”‚Processing Serviceβ”‚ β”‚Strategy Service β”‚ +β”‚ β€’ Market Data │────▢│ β€’ Indicators │────▢│ β€’ Strategies β”‚ +β”‚ β€’ Providers β”‚ β”‚ β€’ Analytics β”‚ β”‚ β€’ Backtesting β”‚ +β”‚ β€’ QuestDB β”‚ β”‚ β€’ Validation β”‚ β”‚ β€’ Signal Gen β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ β”‚ β”‚ + β”‚ β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”‚ + └──────────────▢│ Event Bus β”‚β—€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ (Dragonfly) β”‚ + β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚Execution Serviceβ”‚ β”‚Portfolio Serviceβ”‚ β”‚ Dashboard β”‚ +β”‚ β€’ Order Mgmt β”‚ β”‚ β€’ Positions β”‚ β”‚ β€’ Angular UI β”‚ +β”‚ β€’ Risk Control β”‚ β”‚ β€’ Risk Mgmt β”‚ β”‚ β€’ Real-time β”‚ +β”‚ 
β€’ Execution β”‚ β”‚ β€’ Performance β”‚ β”‚ β€’ Analytics β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +``` + +## Services Structure + +``` +stock-bot/ +β”œβ”€β”€ apps/ +β”‚ β”œβ”€β”€ data-service/ # Market data ingestion & storage +β”‚ β”œβ”€β”€ execution-service/ # Order execution & broker integration +β”‚ β”œβ”€β”€ portfolio-service/ # Position & risk management +β”‚ β”œβ”€β”€ processing-service/ # Data processing & indicators +β”‚ β”œβ”€β”€ strategy-service/ # Trading strategies & backtesting +β”‚ └── dashboard/ # Angular UI (port 4200) +β”‚ +β”œβ”€β”€ libs/ # Shared libraries +β”‚ β”œβ”€β”€ logger/ # Centralized logging w/ Loki +β”‚ β”œβ”€β”€ config/ # Configuration management +β”‚ β”œβ”€β”€ event-bus/ # Event system +β”‚ β”œβ”€β”€ mongodb-client/ # MongoDB operations +β”‚ β”œβ”€β”€ postgres-client/ # PostgreSQL operations +β”‚ β”œβ”€β”€ questdb-client/ # Time-series data +β”‚ β”œβ”€β”€ http/ # HTTP client w/ proxy support +β”‚ β”œβ”€β”€ cache/ # Caching layer +β”‚ └── utils/ # Common utilities +β”‚ +└── database/ # Database configurations + β”œβ”€β”€ mongodb/init/ + └── postgres/init/ +``` + +## Technology Stack + +| Component | Technology | Purpose | +|-----------|------------|---------| +| **Runtime** | Bun | Fast JavaScript runtime | +| **Language** | TypeScript | Type-safe development | +| **Databases** | PostgreSQL, MongoDB, QuestDB | Multi-database architecture | +| **Caching** | Dragonfly (Redis) | Event bus & caching | +| **Frontend** | Angular 18 | Modern reactive UI | +| **Monitoring** | Prometheus, Grafana, Loki | Observability stack | + +## Quick Start + +```bash +# Install dependencies +bun install + +# Start infrastructure +bun run infra:up + +# Start services +bun run dev + +# Access dashboard +# http://localhost:4200 +``` + +## Key Features + +- **Real-time Trading**: Live market data & order execution +- 
**Multi-Database**: PostgreSQL, MongoDB, QuestDB for different data types +- **Event-Driven**: Asynchronous communication via Dragonfly +- **Monitoring**: Full observability with metrics, logs, and tracing +- **Modular**: Shared libraries for common functionality +- **Type-Safe**: Full TypeScript coverage +β”‚ β”œβ”€β”€ processing-service/ # Combined processing & indicators +β”‚ β”‚ β”œβ”€β”€ src/ +β”‚ β”‚ β”‚ β”œβ”€β”€ indicators/ # Technical indicators (uses @stock-bot/utils) +β”‚ β”‚ β”‚ β”œβ”€β”€ processors/ # Data processing pipeline +β”‚ β”‚ β”‚ β”œβ”€β”€ vectorized/ # Vectorized calculations +β”‚ β”‚ β”‚ β”œβ”€β”€ services/ +β”‚ β”‚ β”‚ └── index.ts +β”‚ β”‚ └── package.json +β”‚ β”‚ +β”‚ β”œβ”€β”€ strategy-service/ # Combined strategy & backtesting +β”‚ β”‚ β”œβ”€β”€ src/ +β”‚ β”‚ β”‚ β”œβ”€β”€ strategies/ # Strategy implementations +β”‚ β”‚ β”‚ β”œβ”€β”€ backtesting/ # Multi-mode backtesting engine +β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ modes/ # Backtesting modes +β”‚ β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ live-mode.ts # Live trading mode +β”‚ β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ event-mode.ts # Event-driven backtest +β”‚ β”‚ β”‚ β”‚ β”‚ └── vector-mode.ts # Vectorized backtest +β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ engines/ # Execution engines +β”‚ β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ event-engine.ts # Event-based simulation +β”‚ β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ vector-engine.ts # Vectorized calculations +β”‚ β”‚ β”‚ β”‚ β”‚ └── hybrid-engine.ts # Combined validation +β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ simulator.ts # Market simulator +β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ runner.ts # Backtest orchestrator +β”‚ β”‚ β”‚ β”‚ └── metrics.ts # Performance analysis +β”‚ β”‚ β”‚ β”œβ”€β”€ live/ # Live strategy execution +β”‚ β”‚ β”‚ β”œβ”€β”€ framework/ # Strategy framework +β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ base-strategy.ts +β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ execution-mode.ts +β”‚ β”‚ β”‚ β”‚ └── mode-factory.ts +β”‚ β”‚ β”‚ └── index.ts +β”‚ β”‚ └── package.json +β”‚ β”‚ +β”‚ β”œβ”€β”€ execution-service/ # Combined order execution & simulation +β”‚ β”‚ 
β”œβ”€β”€ src/ +β”‚ β”‚ β”‚ β”œβ”€β”€ brokers/ # Live broker adapters +β”‚ β”‚ β”‚ β”œβ”€β”€ simulation/ # Simulated execution +β”‚ β”‚ β”‚ β”œβ”€β”€ unified/ # Unified execution interface +β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ executor.ts # Abstract executor +β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ live-executor.ts +β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ sim-executor.ts +β”‚ β”‚ β”‚ β”‚ └── vector-executor.ts +β”‚ β”‚ β”‚ └── index.ts +β”‚ β”‚ └── package.json +β”‚ β”‚ +β”‚ β”œβ”€β”€ portfolio-service/ # Combined portfolio & risk management +β”‚ β”‚ β”œβ”€β”€ src/ +β”‚ β”‚ β”‚ β”œβ”€β”€ portfolio/ # Portfolio tracking +β”‚ β”‚ β”‚ β”œβ”€β”€ risk/ # Risk management (uses @stock-bot/utils) +β”‚ β”‚ β”‚ β”œβ”€β”€ positions/ # Position management +β”‚ β”‚ β”‚ β”œβ”€β”€ performance/ # Performance tracking +β”‚ β”‚ β”‚ └── index.ts +β”‚ β”‚ └── package.json +β”‚ β”‚ +β”‚ └── dashboard/ # Combined API & reporting +β”‚ β”œβ”€β”€ src/ +β”‚ β”‚ β”œβ”€β”€ api/ # REST API +β”‚ β”‚ β”œβ”€β”€ web/ # Web interface (Angular) +β”‚ β”‚ β”œβ”€β”€ reports/ # Report generation +β”‚ β”‚ β”œβ”€β”€ websockets/ # Real-time updates +β”‚ β”‚ └── index.ts +β”‚ └── package.json +β”‚ +β”œβ”€β”€ libs/ # βœ… Your existing shared libraries +β”‚ β”œβ”€β”€ config/ # βœ… Environment configuration +β”‚ β”œβ”€β”€ http/ # βœ… HTTP utilities +β”‚ β”œβ”€β”€ logger/ # βœ… Loki-integrated logging +β”‚ β”œβ”€β”€ mongodb-client/ # βœ… MongoDB operations +β”‚ β”œβ”€β”€ postgres-client/ # βœ… PostgreSQL operations +β”‚ β”œβ”€β”€ questdb-client/ # βœ… Time-series data +β”‚ β”œβ”€β”€ types/ # βœ… Shared TypeScript types +β”‚ β”œβ”€β”€ utils/ # βœ… Calculations & utilities +β”‚ β”œβ”€β”€ event-bus/ # πŸ†• Dragonfly event system +β”‚ β”œβ”€β”€ strategy-engine/ # πŸ†• Strategy framework +β”‚ β”œβ”€β”€ vector-engine/ # πŸ†• Vectorized calculations +β”‚ └── data-frame/ # πŸ†• DataFrame operations +``` + +## Multi-Mode Backtesting Architecture + +### 1. 
Execution Mode Framework + +```typescript +export abstract class ExecutionMode { + protected logger = createLogger(this.constructor.name); + protected config = new ServiceConfig(); + + abstract name: string; + abstract executeOrder(order: Order): Promise<ExecutionResult>; + abstract getCurrentTime(): Date; + abstract getMarketData(symbol: string): Promise<MarketData | null>; + abstract publishEvent(event: string, data: any): Promise<void>; +} + +export enum BacktestMode { + LIVE = 'live', + EVENT_DRIVEN = 'event-driven', + VECTORIZED = 'vectorized', + HYBRID = 'hybrid' +} +``` + +### 2. Live Trading Mode + +```typescript +export class LiveMode extends ExecutionMode { + name = 'live'; + private broker = new BrokerClient(this.config.getBrokerConfig()); + private eventBus = new EventBus(); + + async executeOrder(order: Order): Promise<ExecutionResult> { + this.logger.info('Executing live order', { orderId: order.id }); + + // Execute via real broker + const result = await this.broker.placeOrder(order); + + // Publish to event bus + await this.eventBus.publish('order.executed', result); + + return result; + } + + getCurrentTime(): Date { + return new Date(); // Real time + } + + async getMarketData(symbol: string): Promise<MarketData | null> { + // Get live market data + return await this.marketDataService.getLiveData(symbol); + } + + async publishEvent(event: string, data: any): Promise<void> { + await this.eventBus.publish(event, data); + } +} +``` + +### 3. 
Event-Driven Backtesting Mode + +```typescript +export class EventBacktestMode extends ExecutionMode { + name = 'event-driven'; + private simulator = new MarketSimulator(); + private eventBus = new InMemoryEventBus(); // In-memory for simulation + private simulationTime: Date; + private historicalData: Map<string, MarketData[]>; + + constructor(private config: BacktestConfig) { + super(); + this.simulationTime = config.startDate; + } + + async executeOrder(order: Order): Promise<ExecutionResult> { + this.logger.debug('Simulating order execution', { + orderId: order.id, + simulationTime: this.simulationTime + }); + + // Realistic order simulation with slippage, fees + const result = await this.simulator.executeOrder(order, { + currentTime: this.simulationTime, + marketData: await this.getMarketData(order.symbol), + slippageModel: this.config.slippageModel, + commissionModel: this.config.commissionModel + }); + + // Publish to simulation event bus + await this.eventBus.publish('order.executed', result); + + return result; + } + + getCurrentTime(): Date { + return this.simulationTime; + } + + async getMarketData(symbol: string): Promise<MarketData | null> { + const data = this.historicalData.get(symbol) || []; + return data.find(d => d.timestamp <= this.simulationTime) || null; + } + + async publishEvent(event: string, data: any): Promise<void> { + await this.eventBus.publish(event, data); + } + + // Progress simulation time + advanceTime(newTime: Date): void { + this.simulationTime = newTime; + } +} +``` + +### 4. 
Vectorized Backtesting Mode + +```typescript +export class VectorBacktestMode extends ExecutionMode { + name = 'vectorized'; + private dataFrame: DataFrame; + private currentIndex: number = 0; + + constructor(private config: VectorBacktestConfig) { + super(); + this.dataFrame = new DataFrame(config.historicalData); + } + + // Vectorized execution - processes entire dataset at once + async executeVectorizedBacktest(strategy: VectorizedStrategy): Promise<BacktestResult> { + const startTime = Date.now(); + + this.logger.info('Starting vectorized backtest', { + strategy: strategy.name, + dataPoints: this.dataFrame.length + }); + + // Generate all signals at once using your utils library + const signals = this.generateVectorizedSignals(strategy); + + // Calculate performance metrics vectorized + const performance = this.calculateVectorizedPerformance(signals); + + // Apply trading costs if specified + if (this.config.tradingCosts) { + this.applyTradingCosts(performance, signals); + } + + const executionTime = Date.now() - startTime; + + this.logger.info('Vectorized backtest completed', { + executionTime, + totalReturn: performance.totalReturn, + sharpeRatio: performance.sharpeRatio + }); + + return { + mode: 'vectorized', + strategy: strategy.name, + performance, + executionTime, + signals + }; + } + + private generateVectorizedSignals(strategy: VectorizedStrategy): DataFrame { + const prices = this.dataFrame.get('close'); + + // Use your existing technical indicators from @stock-bot/utils + const indicators = { + sma20: sma(prices, 20), + sma50: sma(prices, 50), + rsi: rsi(prices, 14), + macd: macd(prices) + }; + + // Generate position signals vectorized + const positions = strategy.generatePositions(this.dataFrame, indicators); + + return new DataFrame({ + ...this.dataFrame.toObject(), + ...indicators, + positions + }); + } + + private calculateVectorizedPerformance(signals: DataFrame): PerformanceMetrics { + const prices = signals.get('close'); + const positions = 
signals.get('positions'); + + // Calculate returns vectorized + const returns = prices.slice(1).map((price, i) => + (price - prices[i]) / prices[i] + ); + + // Strategy returns = position[t-1] * market_return[t] + const strategyReturns = returns.map((ret, i) => + (positions[i] || 0) * ret + ); + + // Use your existing performance calculation utilities + return { + totalReturn: calculateTotalReturn(strategyReturns), + sharpeRatio: calculateSharpeRatio(strategyReturns), + maxDrawdown: calculateMaxDrawdown(strategyReturns), + volatility: calculateVolatility(strategyReturns), + winRate: calculateWinRate(strategyReturns) + }; + } + + // Standard interface methods (not used in vectorized mode) + async executeOrder(order: Order): Promise<ExecutionResult> { + throw new Error('Use executeVectorizedBacktest for vectorized mode'); + } + + getCurrentTime(): Date { + return this.dataFrame.getTimestamp(this.currentIndex); + } + + async getMarketData(symbol: string): Promise<MarketData | null> { + return this.dataFrame.getRow(this.currentIndex); + } + + async publishEvent(event: string, data: any): Promise<void> { + // No-op for vectorized mode + } +} +``` + +### 5. 
Hybrid Validation Mode + +```typescript +export class HybridBacktestMode extends ExecutionMode { + name = 'hybrid'; + private eventMode: EventBacktestMode; + private vectorMode: VectorBacktestMode; + + constructor(config: BacktestConfig) { + super(); + this.eventMode = new EventBacktestMode(config); + this.vectorMode = new VectorBacktestMode(config); + } + + async validateStrategy( + strategy: BaseStrategy, + tolerance: number = 0.001 + ): Promise<ValidationResult> { + + this.logger.info('Starting hybrid validation', { + strategy: strategy.name, + tolerance + }); + + // Run vectorized backtest (fast) + const vectorResult = await this.vectorMode.executeVectorizedBacktest( + strategy as VectorizedStrategy + ); + + // Run event-driven backtest (realistic) + const eventResult = await this.runEventBacktest(strategy); + + // Compare results + const performanceDiff = Math.abs( + vectorResult.performance.totalReturn - + eventResult.performance.totalReturn + ); + + const isValid = performanceDiff < tolerance; + + this.logger.info('Hybrid validation completed', { + isValid, + performanceDifference: performanceDiff, + recommendation: isValid ? 'vectorized' : 'event-driven' + }); + + return { + isValid, + performanceDifference: performanceDiff, + vectorizedResult: vectorResult, + eventResult, + recommendation: isValid ? 
+ 'Vectorized results are reliable for this strategy' : + 'Use event-driven backtesting for accurate results' + }; + } + + // Standard interface methods delegate to event mode + async executeOrder(order: Order): Promise<ExecutionResult> { + return await this.eventMode.executeOrder(order); + } + + getCurrentTime(): Date { + return this.eventMode.getCurrentTime(); + } + + async getMarketData(symbol: string): Promise<MarketData | null> { + return await this.eventMode.getMarketData(symbol); + } + + async publishEvent(event: string, data: any): Promise<void> { + await this.eventMode.publishEvent(event, data); + } +} +``` + +## Unified Strategy Implementation + +### Base Strategy Framework + +```typescript +export abstract class BaseStrategy { + protected mode: ExecutionMode; + protected logger = createLogger(this.constructor.name); + + abstract name: string; + abstract parameters: Record<string, any>; + + constructor(mode: ExecutionMode) { + this.mode = mode; + } + + // Works identically across all modes + abstract onPriceUpdate(data: PriceData): Promise<void>; + abstract onIndicatorUpdate(data: IndicatorData): Promise<void>; + + protected async emitSignal(signal: TradeSignal): Promise<void> { + this.logger.debug('Emitting trade signal', { signal }); + + // Mode handles whether this is live, simulated, or vectorized + const order = this.createOrder(signal); + const result = await this.mode.executeOrder(order); + + await this.mode.publishEvent('trade.executed', { + signal, + order, + result, + timestamp: this.mode.getCurrentTime() + }); + } + + private createOrder(signal: TradeSignal): Order { + return { + id: generateId(), + symbol: signal.symbol, + side: signal.action, + quantity: signal.quantity, + type: 'market', + timestamp: this.mode.getCurrentTime() + }; + } +} + +// Vectorized strategy interface +export interface VectorizedStrategy { + name: string; + parameters: Record<string, any>; + generatePositions(data: DataFrame, indicators: any): number[]; +} +``` + +### Example Strategy Implementation + +```typescript +export class SMAStrategy extends 
BaseStrategy implements VectorizedStrategy { + name = 'SMA-Crossover'; + parameters = { fastPeriod: 10, slowPeriod: 20 }; + + private fastSMA: number[] = []; + private slowSMA: number[] = []; + + async onPriceUpdate(data: PriceData): Promise<void> { + // Same logic for live, event-driven, and hybrid modes + this.fastSMA.push(data.close); + this.slowSMA.push(data.close); + + if (this.fastSMA.length > this.parameters.fastPeriod) { + this.fastSMA.shift(); + } + if (this.slowSMA.length > this.parameters.slowPeriod) { + this.slowSMA.shift(); + } + + if (this.fastSMA.length === this.parameters.fastPeriod && + this.slowSMA.length === this.parameters.slowPeriod) { + + const fastAvg = sma(this.fastSMA, this.parameters.fastPeriod)[0]; + const slowAvg = sma(this.slowSMA, this.parameters.slowPeriod)[0]; + + if (fastAvg > slowAvg) { + await this.emitSignal({ + symbol: data.symbol, + action: 'BUY', + quantity: 100, + confidence: 0.8 + }); + } else if (fastAvg < slowAvg) { + await this.emitSignal({ + symbol: data.symbol, + action: 'SELL', + quantity: 100, + confidence: 0.8 + }); + } + } + } + + async onIndicatorUpdate(data: IndicatorData): Promise<void> { + // Handle pre-calculated indicators + } + + // Vectorized implementation for fast backtesting + generatePositions(data: DataFrame, indicators: any): number[] { + const { sma20: fastSMA, sma50: slowSMA } = indicators; + + return fastSMA.map((fast, i) => { + const slow = slowSMA[i]; + if (isNaN(fast) || isNaN(slow)) return 0; + + // Long when fast > slow, short when fast < slow + return fast > slow ? 1 : (fast < slow ? 
-1 : 0); + }); + } +} +``` + +## Mode Factory and Service Integration + +### Mode Factory + +```typescript +export class ModeFactory { + static create(mode: BacktestMode, config: any): ExecutionMode { + switch (mode) { + case BacktestMode.LIVE: + return new LiveMode(); + case BacktestMode.EVENT_DRIVEN: + return new EventBacktestMode(config); + case BacktestMode.VECTORIZED: + return new VectorBacktestMode(config); + case BacktestMode.HYBRID: + return new HybridBacktestMode(config); + default: + throw new Error(`Unknown mode: ${mode}`); + } + } +} +``` + +### Strategy Service Integration + +```typescript +export class StrategyService { + private logger = createLogger('strategy-service'); + + async runStrategy( + strategyName: string, + mode: BacktestMode, + config: any + ): Promise { + + const executionMode = ModeFactory.create(mode, config); + const strategy = await this.loadStrategy(strategyName, executionMode); + + this.logger.info('Starting strategy execution', { + strategy: strategyName, + mode, + config + }); + + switch (mode) { + case BacktestMode.LIVE: + return await this.runLiveStrategy(strategy); + + case BacktestMode.EVENT_DRIVEN: + return await this.runEventBacktest(strategy, config); + + case BacktestMode.VECTORIZED: + return await (executionMode as VectorBacktestMode) + .executeVectorizedBacktest(strategy as VectorizedStrategy); + + case BacktestMode.HYBRID: + return await (executionMode as HybridBacktestMode) + .validateStrategy(strategy, config.tolerance); + + default: + throw new Error(`Unsupported mode: ${mode}`); + } + } + + async optimizeStrategy( + strategyName: string, + parameterGrid: Record, + config: BacktestConfig + ): Promise { + + const results: OptimizationResult[] = []; + const combinations = this.generateParameterCombinations(parameterGrid); + + this.logger.info('Starting parameter optimization', { + strategy: strategyName, + combinations: combinations.length + }); + + // Use vectorized mode for fast parameter optimization + const 
vectorMode = new VectorBacktestMode(config); + + // Can be parallelized + await Promise.all( + combinations.map(async (params) => { + const strategy = await this.loadStrategy(strategyName, vectorMode, params); + const result = await vectorMode.executeVectorizedBacktest( + strategy as VectorizedStrategy + ); + + results.push({ + parameters: params, + performance: result.performance, + executionTime: result.executionTime + }); + }) + ); + + // Sort by Sharpe ratio + return results.sort((a, b) => + b.performance.sharpeRatio - a.performance.sharpeRatio + ); + } +} +``` + +## Service Configuration + +### Environment-Based Mode Selection + +```typescript +export class ServiceConfig { + getTradingConfig(): TradingConfig { + return { + mode: (process.env.TRADING_MODE as BacktestMode) || BacktestMode.LIVE, + brokerConfig: { + apiKey: process.env.BROKER_API_KEY, + sandbox: process.env.BROKER_SANDBOX === 'true' + }, + backtestConfig: { + startDate: new Date(process.env.BACKTEST_START_DATE || '2023-01-01'), + endDate: new Date(process.env.BACKTEST_END_DATE || '2024-01-01'), + initialCapital: parseFloat(process.env.INITIAL_CAPITAL || '100000'), + slippageModel: process.env.SLIPPAGE_MODEL || 'linear', + commissionModel: process.env.COMMISSION_MODEL || 'fixed' + } + }; + } +} +``` + +### CLI Interface + +```typescript +// CLI for running different modes +import { Command } from 'commander'; + +const program = new Command(); + +program + .name('stock-bot') + .description('Stock Trading Bot with Multi-Mode Backtesting'); + +program + .command('live') + .description('Run live trading') + .option('-s, --strategy ', 'Strategy to run') + .action(async (options) => { + const strategyService = new StrategyService(); + await strategyService.runStrategy( + options.strategy, + BacktestMode.LIVE, + {} + ); + }); + +program + .command('backtest') + .description('Run backtesting') + .option('-s, --strategy ', 'Strategy to test') + .option('-m, --mode ', 'Backtest mode (event|vector|hybrid)', 
'event') + .option('-f, --from ', 'Start date') + .option('-t, --to ', 'End date') + .action(async (options) => { + const strategyService = new StrategyService(); + await strategyService.runStrategy( + options.strategy, + options.mode as BacktestMode, + { + startDate: new Date(options.from), + endDate: new Date(options.to) + } + ); + }); + +program + .command('optimize') + .description('Optimize strategy parameters') + .option('-s, --strategy ', 'Strategy to optimize') + .option('-p, --params ', 'Parameter grid JSON') + .action(async (options) => { + const strategyService = new StrategyService(); + const paramGrid = JSON.parse(options.params); + await strategyService.optimizeStrategy( + options.strategy, + paramGrid, + {} + ); + }); + +program.parse(); +``` + +## Performance Comparison + +### Execution Speed by Mode + +| Mode | Data Points/Second | Memory Usage | Use Case | +|------|-------------------|--------------|----------| +| **Live** | Real-time | Low | Production trading | +| **Event-Driven** | ~1,000 | Medium | Realistic validation | +| **Vectorized** | ~100,000+ | High | Parameter optimization | +| **Hybrid** | Combined | Medium | Strategy validation | + +### When to Use Each Mode + +- **Live Mode**: Production trading with real money +- **Event-Driven**: Final strategy validation, complex order logic +- **Vectorized**: Initial development, parameter optimization, quick testing +- **Hybrid**: Validating vectorized results against realistic simulation + +## Integration with Your Existing Libraries + +This architecture leverages all your existing infrastructure: + +- **@stock-bot/config**: Environment management +- **@stock-bot/logger**: Comprehensive logging with Loki +- **@stock-bot/utils**: All technical indicators and calculations +- **@stock-bot/questdb-client**: Time-series data storage +- **@stock-bot/postgres-client**: Transactional data +- **@stock-bot/mongodb-client**: Configuration storage + +## Key Benefits + +1. 
**Unified Codebase**: Same strategy logic across all modes +2. **Performance Flexibility**: Choose speed vs accuracy based on needs +3. **Validation Pipeline**: Hybrid mode ensures vectorized results are accurate +4. **Production Ready**: Live mode for actual trading +5. **Development Friendly**: Fast iteration with vectorized backtesting + +This simplified architecture reduces complexity while providing comprehensive backtesting capabilities that scale from rapid prototyping to production trading. diff --git a/apps/dashboard/.editorconfig b/apps/dashboard/.editorconfig index f166060..19a5b83 100644 --- a/apps/dashboard/.editorconfig +++ b/apps/dashboard/.editorconfig @@ -1,17 +1,17 @@ -# Editor configuration, see https://editorconfig.org -root = true - -[*] -charset = utf-8 -indent_style = space -indent_size = 2 -insert_final_newline = true -trim_trailing_whitespace = true - -[*.ts] -quote_type = single -ij_typescript_use_double_quotes = false - -[*.md] -max_line_length = off -trim_trailing_whitespace = false +# Editor configuration, see https://editorconfig.org +root = true + +[*] +charset = utf-8 +indent_style = space +indent_size = 2 +insert_final_newline = true +trim_trailing_whitespace = true + +[*.ts] +quote_type = single +ij_typescript_use_double_quotes = false + +[*.md] +max_line_length = off +trim_trailing_whitespace = false diff --git a/apps/dashboard/.gitignore b/apps/dashboard/.gitignore index cc7b141..70583d7 100644 --- a/apps/dashboard/.gitignore +++ b/apps/dashboard/.gitignore @@ -1,42 +1,42 @@ -# See https://docs.github.com/get-started/getting-started-with-git/ignoring-files for more about ignoring files. 
- -# Compiled output -/dist -/tmp -/out-tsc -/bazel-out - -# Node -/node_modules -npm-debug.log -yarn-error.log - -# IDEs and editors -.idea/ -.project -.classpath -.c9/ -*.launch -.settings/ -*.sublime-workspace - -# Visual Studio Code -.vscode/* -!.vscode/settings.json -!.vscode/tasks.json -!.vscode/launch.json -!.vscode/extensions.json -.history/* - -# Miscellaneous -/.angular/cache -.sass-cache/ -/connect.lock -/coverage -/libpeerconnection.log -testem.log -/typings - -# System files -.DS_Store -Thumbs.db +# See https://docs.github.com/get-started/getting-started-with-git/ignoring-files for more about ignoring files. + +# Compiled output +/dist +/tmp +/out-tsc +/bazel-out + +# Node +/node_modules +npm-debug.log +yarn-error.log + +# IDEs and editors +.idea/ +.project +.classpath +.c9/ +*.launch +.settings/ +*.sublime-workspace + +# Visual Studio Code +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +.history/* + +# Miscellaneous +/.angular/cache +.sass-cache/ +/connect.lock +/coverage +/libpeerconnection.log +testem.log +/typings + +# System files +.DS_Store +Thumbs.db diff --git a/apps/dashboard/.postcssrc.json b/apps/dashboard/.postcssrc.json index 72f908d..9ca94b5 100644 --- a/apps/dashboard/.postcssrc.json +++ b/apps/dashboard/.postcssrc.json @@ -1,5 +1,5 @@ -{ - "plugins": { - "@tailwindcss/postcss": {} - } +{ + "plugins": { + "@tailwindcss/postcss": {} + } } \ No newline at end of file diff --git a/apps/dashboard/.vscode/extensions.json b/apps/dashboard/.vscode/extensions.json index 77b3745..feccd00 100644 --- a/apps/dashboard/.vscode/extensions.json +++ b/apps/dashboard/.vscode/extensions.json @@ -1,4 +1,4 @@ -{ - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=827846 - "recommendations": ["angular.ng-template"] -} +{ + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=827846 + "recommendations": ["angular.ng-template"] +} diff --git 
a/apps/dashboard/.vscode/launch.json b/apps/dashboard/.vscode/launch.json index 925af83..278bd60 100644 --- a/apps/dashboard/.vscode/launch.json +++ b/apps/dashboard/.vscode/launch.json @@ -1,20 +1,20 @@ -{ - // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 - "version": "0.2.0", - "configurations": [ - { - "name": "ng serve", - "type": "chrome", - "request": "launch", - "preLaunchTask": "npm: start", - "url": "http://localhost:4200/" - }, - { - "name": "ng test", - "type": "chrome", - "request": "launch", - "preLaunchTask": "npm: test", - "url": "http://localhost:9876/debug.html" - } - ] -} +{ + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "ng serve", + "type": "chrome", + "request": "launch", + "preLaunchTask": "npm: start", + "url": "http://localhost:4200/" + }, + { + "name": "ng test", + "type": "chrome", + "request": "launch", + "preLaunchTask": "npm: test", + "url": "http://localhost:9876/debug.html" + } + ] +} diff --git a/apps/dashboard/.vscode/tasks.json b/apps/dashboard/.vscode/tasks.json index a298b5b..e4f8cf0 100644 --- a/apps/dashboard/.vscode/tasks.json +++ b/apps/dashboard/.vscode/tasks.json @@ -1,42 +1,42 @@ -{ - // For more information, visit: https://go.microsoft.com/fwlink/?LinkId=733558 - "version": "2.0.0", - "tasks": [ - { - "type": "npm", - "script": "start", - "isBackground": true, - "problemMatcher": { - "owner": "typescript", - "pattern": "$tsc", - "background": { - "activeOnStart": true, - "beginsPattern": { - "regexp": "(.*?)" - }, - "endsPattern": { - "regexp": "bundle generation complete" - } - } - } - }, - { - "type": "npm", - "script": "test", - "isBackground": true, - "problemMatcher": { - "owner": "typescript", - "pattern": "$tsc", - "background": { - "activeOnStart": true, - "beginsPattern": { - "regexp": "(.*?)" - }, - "endsPattern": { - "regexp": "bundle generation complete" - } - } - } - } - ] -} +{ + // For 
more information, visit: https://go.microsoft.com/fwlink/?LinkId=733558 + "version": "2.0.0", + "tasks": [ + { + "type": "npm", + "script": "start", + "isBackground": true, + "problemMatcher": { + "owner": "typescript", + "pattern": "$tsc", + "background": { + "activeOnStart": true, + "beginsPattern": { + "regexp": "(.*?)" + }, + "endsPattern": { + "regexp": "bundle generation complete" + } + } + } + }, + { + "type": "npm", + "script": "test", + "isBackground": true, + "problemMatcher": { + "owner": "typescript", + "pattern": "$tsc", + "background": { + "activeOnStart": true, + "beginsPattern": { + "regexp": "(.*?)" + }, + "endsPattern": { + "regexp": "bundle generation complete" + } + } + } + } + ] +} diff --git a/apps/dashboard/README.md b/apps/dashboard/README.md index 3a442dc..8327d96 100644 --- a/apps/dashboard/README.md +++ b/apps/dashboard/README.md @@ -1,59 +1,59 @@ -# TradingDashboard - -This project was generated using [Angular CLI](https://github.com/angular/angular-cli) version 20.0.0. - -## Development server - -To start a local development server, run: - -```bash -ng serve -``` - -Once the server is running, open your browser and navigate to `http://localhost:4200/`. The application will automatically reload whenever you modify any of the source files. - -## Code scaffolding - -Angular CLI includes powerful code scaffolding tools. To generate a new component, run: - -```bash -ng generate component component-name -``` - -For a complete list of available schematics (such as `components`, `directives`, or `pipes`), run: - -```bash -ng generate --help -``` - -## Building - -To build the project run: - -```bash -ng build -``` - -This will compile your project and store the build artifacts in the `dist/` directory. By default, the production build optimizes your application for performance and speed. 
- -## Running unit tests - -To execute unit tests with the [Karma](https://karma-runner.github.io) test runner, use the following command: - -```bash -ng test -``` - -## Running end-to-end tests - -For end-to-end (e2e) testing, run: - -```bash -ng e2e -``` - -Angular CLI does not come with an end-to-end testing framework by default. You can choose one that suits your needs. - -## Additional Resources - -For more information on using the Angular CLI, including detailed command references, visit the [Angular CLI Overview and Command Reference](https://angular.dev/tools/cli) page. +# TradingDashboard + +This project was generated using [Angular CLI](https://github.com/angular/angular-cli) version 20.0.0. + +## Development server + +To start a local development server, run: + +```bash +ng serve +``` + +Once the server is running, open your browser and navigate to `http://localhost:4200/`. The application will automatically reload whenever you modify any of the source files. + +## Code scaffolding + +Angular CLI includes powerful code scaffolding tools. To generate a new component, run: + +```bash +ng generate component component-name +``` + +For a complete list of available schematics (such as `components`, `directives`, or `pipes`), run: + +```bash +ng generate --help +``` + +## Building + +To build the project run: + +```bash +ng build +``` + +This will compile your project and store the build artifacts in the `dist/` directory. By default, the production build optimizes your application for performance and speed. + +## Running unit tests + +To execute unit tests with the [Karma](https://karma-runner.github.io) test runner, use the following command: + +```bash +ng test +``` + +## Running end-to-end tests + +For end-to-end (e2e) testing, run: + +```bash +ng e2e +``` + +Angular CLI does not come with an end-to-end testing framework by default. You can choose one that suits your needs. 
+ +## Additional Resources + +For more information on using the Angular CLI, including detailed command references, visit the [Angular CLI Overview and Command Reference](https://angular.dev/tools/cli) page. diff --git a/apps/dashboard/angular.json b/apps/dashboard/angular.json index 2c0e353..ca7fd05 100644 --- a/apps/dashboard/angular.json +++ b/apps/dashboard/angular.json @@ -1,91 +1,91 @@ -{ - "$schema": "./node_modules/@angular/cli/lib/config/schema.json", - "version": 1, - "cli": { - "packageManager": "npm" - }, - "newProjectRoot": "projects", - "projects": { - "trading-dashboard": { - "projectType": "application", "schematics": { - "@schematics/angular:component": { - "style": "css" - } - }, - "root": "", - "sourceRoot": "src", - "prefix": "app", - "architect": { "build": { - "builder": "@angular/build:application", - "options": { - "browser": "src/main.ts", - "tsConfig": "tsconfig.app.json", - "inlineStyleLanguage": "css", - "assets": [ - { - "glob": "**/*", - "input": "public" - } - ], - "styles": [ - "src/styles.css" - ] - }, - "configurations": { - "production": { - "budgets": [ - { - "type": "initial", - "maximumWarning": "500kB", - "maximumError": "1MB" - }, - { - "type": "anyComponentStyle", - "maximumWarning": "4kB", - "maximumError": "8kB" - } - ], - "outputHashing": "all" - }, - "development": { - "optimization": false, - "extractLicenses": false, - "sourceMap": false - } - }, - "defaultConfiguration": "production" - }, - "serve": { - "builder": "@angular/build:dev-server", - "configurations": { - "production": { - "buildTarget": "trading-dashboard:build:production" - }, - "development": { - "buildTarget": "trading-dashboard:build:development" - } - }, - "defaultConfiguration": "development" - }, - "extract-i18n": { - "builder": "@angular/build:extract-i18n" - }, "test": { - "builder": "@angular/build:karma", - "options": { - "tsConfig": "tsconfig.spec.json", - "inlineStyleLanguage": "css", - "assets": [ - { - "glob": "**/*", - "input": "public" - } 
- ], - "styles": [ - "src/styles.css" - ] - } - } - } - } - } -} +{ + "$schema": "./node_modules/@angular/cli/lib/config/schema.json", + "version": 1, + "cli": { + "packageManager": "npm" + }, + "newProjectRoot": "projects", + "projects": { + "trading-dashboard": { + "projectType": "application", "schematics": { + "@schematics/angular:component": { + "style": "css" + } + }, + "root": "", + "sourceRoot": "src", + "prefix": "app", + "architect": { "build": { + "builder": "@angular/build:application", + "options": { + "browser": "src/main.ts", + "tsConfig": "tsconfig.app.json", + "inlineStyleLanguage": "css", + "assets": [ + { + "glob": "**/*", + "input": "public" + } + ], + "styles": [ + "src/styles.css" + ] + }, + "configurations": { + "production": { + "budgets": [ + { + "type": "initial", + "maximumWarning": "500kB", + "maximumError": "1MB" + }, + { + "type": "anyComponentStyle", + "maximumWarning": "4kB", + "maximumError": "8kB" + } + ], + "outputHashing": "all" + }, + "development": { + "optimization": false, + "extractLicenses": false, + "sourceMap": false + } + }, + "defaultConfiguration": "production" + }, + "serve": { + "builder": "@angular/build:dev-server", + "configurations": { + "production": { + "buildTarget": "trading-dashboard:build:production" + }, + "development": { + "buildTarget": "trading-dashboard:build:development" + } + }, + "defaultConfiguration": "development" + }, + "extract-i18n": { + "builder": "@angular/build:extract-i18n" + }, "test": { + "builder": "@angular/build:karma", + "options": { + "tsConfig": "tsconfig.spec.json", + "inlineStyleLanguage": "css", + "assets": [ + { + "glob": "**/*", + "input": "public" + } + ], + "styles": [ + "src/styles.css" + ] + } + } + } + } + } +} diff --git a/apps/dashboard/package.json b/apps/dashboard/package.json index e620e45..50e70d0 100644 --- a/apps/dashboard/package.json +++ b/apps/dashboard/package.json @@ -1,44 +1,44 @@ -{ - "name": "trading-dashboard", - "version": "0.0.0", - "scripts": { - 
"ng": "ng", - "start": "ng serve", - "devvvv": "ng serve --port 5173 --host 0.0.0.0", - "build": "ng build", - "watch": "ng build --watch --configuration development", - "test": "ng test" - }, - "private": true, - "dependencies": { - "@angular/animations": "^20.0.0", - "@angular/cdk": "^20.0.1", - "@angular/common": "^20.0.0", - "@angular/compiler": "^20.0.0", - "@angular/core": "^20.0.0", - "@angular/forms": "^20.0.0", - "@angular/material": "^20.0.1", - "@angular/platform-browser": "^20.0.0", - "@angular/router": "^20.0.0", - "rxjs": "~7.8.2", - "tslib": "^2.8.1", - "zone.js": "~0.15.1" - }, - "devDependencies": { - "@angular/build": "^20.0.0", - "@angular/cli": "^20.0.0", - "@angular/compiler-cli": "^20.0.0", - "@tailwindcss/postcss": "^4.1.8", - "@types/jasmine": "~5.1.8", - "autoprefixer": "^10.4.21", - "jasmine-core": "~5.7.1", - "karma": "~6.4.4", - "karma-chrome-launcher": "~3.2.0", - "karma-coverage": "~2.2.1", - "karma-jasmine": "~5.1.0", - "karma-jasmine-html-reporter": "~2.1.0", - "postcss": "^8.5.4", - "tailwindcss": "^4.1.8", - "typescript": "~5.8.3" - } -} +{ + "name": "trading-dashboard", + "version": "0.0.0", + "scripts": { + "ng": "ng", + "start": "ng serve", + "devvvv": "ng serve --port 5173 --host 0.0.0.0", + "build": "ng build", + "watch": "ng build --watch --configuration development", + "test": "ng test" + }, + "private": true, + "dependencies": { + "@angular/animations": "^20.0.0", + "@angular/cdk": "^20.0.1", + "@angular/common": "^20.0.0", + "@angular/compiler": "^20.0.0", + "@angular/core": "^20.0.0", + "@angular/forms": "^20.0.0", + "@angular/material": "^20.0.1", + "@angular/platform-browser": "^20.0.0", + "@angular/router": "^20.0.0", + "rxjs": "~7.8.2", + "tslib": "^2.8.1", + "zone.js": "~0.15.1" + }, + "devDependencies": { + "@angular/build": "^20.0.0", + "@angular/cli": "^20.0.0", + "@angular/compiler-cli": "^20.0.0", + "@tailwindcss/postcss": "^4.1.8", + "@types/jasmine": "~5.1.8", + "autoprefixer": "^10.4.21", + "jasmine-core": 
"~5.7.1", + "karma": "~6.4.4", + "karma-chrome-launcher": "~3.2.0", + "karma-coverage": "~2.2.1", + "karma-jasmine": "~5.1.0", + "karma-jasmine-html-reporter": "~2.1.0", + "postcss": "^8.5.4", + "tailwindcss": "^4.1.8", + "typescript": "~5.8.3" + } +} diff --git a/apps/dashboard/src/app/app.config.ts b/apps/dashboard/src/app/app.config.ts index 5f0a2ef..431c70f 100644 --- a/apps/dashboard/src/app/app.config.ts +++ b/apps/dashboard/src/app/app.config.ts @@ -1,16 +1,16 @@ -import { ApplicationConfig, provideBrowserGlobalErrorListeners, provideZonelessChangeDetection } from '@angular/core'; -import { provideRouter } from '@angular/router'; -import { provideHttpClient } from '@angular/common/http'; -import { provideAnimationsAsync } from '@angular/platform-browser/animations/async'; - -import { routes } from './app.routes'; - -export const appConfig: ApplicationConfig = { - providers: [ - provideBrowserGlobalErrorListeners(), - provideZonelessChangeDetection(), - provideRouter(routes), - provideHttpClient(), - provideAnimationsAsync() - ] -}; +import { ApplicationConfig, provideBrowserGlobalErrorListeners, provideZonelessChangeDetection } from '@angular/core'; +import { provideRouter } from '@angular/router'; +import { provideHttpClient } from '@angular/common/http'; +import { provideAnimationsAsync } from '@angular/platform-browser/animations/async'; + +import { routes } from './app.routes'; + +export const appConfig: ApplicationConfig = { + providers: [ + provideBrowserGlobalErrorListeners(), + provideZonelessChangeDetection(), + provideRouter(routes), + provideHttpClient(), + provideAnimationsAsync() + ] +}; diff --git a/apps/dashboard/src/app/app.css b/apps/dashboard/src/app/app.css index 3a53335..a18237f 100644 --- a/apps/dashboard/src/app/app.css +++ b/apps/dashboard/src/app/app.css @@ -1,174 +1,174 @@ -/* Custom Angular Material integration styles */ - -/* Sidenav styles */ -.mat-sidenav-container { - background-color: transparent; -} - -.mat-sidenav { - 
border-radius: 0; - width: 16rem; - background-color: white !important; - border-right: 1px solid #e5e7eb !important; -} - -/* Toolbar styles */ -.mat-toolbar { - background-color: white; - color: #374151; - box-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); -} - -/* Button styles */ -.mat-mdc-button.nav-button { - width: 100%; - text-align: left; - justify-content: flex-start; - padding: 0.75rem 1rem; - border-radius: 0.375rem; - margin-bottom: 0.25rem; - background-color: transparent; - transition: background-color 0.15s ease-in-out; -} - -.mat-mdc-button.nav-button:hover { - background-color: #f3f4f6; -} - -.mat-mdc-button.nav-button.bg-blue-50 { - background-color: #eff6ff !important; - color: #1d4ed8 !important; -} - -/* Card styles */ -.mat-mdc-card { - border-radius: 0.5rem; - box-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); - border: 1px solid #f3f4f6; - background-color: white !important; -} - -/* Tab styles */ -.mat-mdc-tab-group .mat-mdc-tab-header { - border-bottom: 1px solid #e5e7eb; -} - -.mat-mdc-tab-label { - color: #6b7280; -} - -.mat-mdc-tab-label:hover { - color: #111827; -} - -.mat-mdc-tab-label-active { - color: #2563eb; - font-weight: 500; -} - -/* Chip styles for status indicators */ -.mat-mdc-chip-set .mat-mdc-chip { - background-color: white; - border: 1px solid #e5e7eb; -} - -.chip-green { - background-color: #dcfce7 !important; - color: #166534 !important; - border: 1px solid #bbf7d0 !important; -} - -.chip-blue { - background-color: #dbeafe !important; - color: #1e40af !important; - border: 1px solid #bfdbfe !important; -} - -.status-chip-active { - background-color: #dcfce7; - color: #166534; - padding: 0.25rem 0.5rem; - border-radius: 9999px; - font-size: 0.75rem; - font-weight: 500; - display: inline-block; -} - -.status-chip-medium { - background-color: #dbeafe; - color: #1e40af; - padding: 0.25rem 0.5rem; - border-radius: 9999px; - font-size: 0.75rem; - font-weight: 500; -} - -/* Table styles */ -.mat-mdc-table { - border-radius: 0.5rem; - 
overflow: hidden; - border: 1px solid #f3f4f6; - background-color: white; -} - -.mat-mdc-header-row { - background-color: #f9fafb; -} - -.mat-mdc-header-cell { - font-weight: 500; - color: #374151; - font-size: 0.875rem; -} - -.mat-mdc-cell { - color: #111827; - font-size: 0.875rem; - padding: 1rem 0; -} - -.mat-mdc-row:hover { - background-color: #f9fafb; - transition: background-color 0.15s ease; -} - -/* Custom utility classes for the dashboard */ -.portfolio-card { - background-color: white !important; - border-radius: 0.5rem; - box-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); - border: 1px solid #f3f4f6; - padding: 1.5rem; -} - -.metric-value { - font-size: 1.5rem; - font-weight: 700; -} - -.metric-label { - font-size: 0.875rem; - color: #6b7280; - margin-top: 0.25rem; -} - -.metric-change-positive { - color: #16a34a; - font-weight: 500; -} - -.metric-change-negative { - color: #dc2626; - font-weight: 500; -} - -/* Responsive styles */ -@media (max-width: 768px) { - .mat-sidenav { - width: 100%; - } - - .hide-mobile { - display: none; - } -} +/* Custom Angular Material integration styles */ + +/* Sidenav styles */ +.mat-sidenav-container { + background-color: transparent; +} + +.mat-sidenav { + border-radius: 0; + width: 16rem; + background-color: white !important; + border-right: 1px solid #e5e7eb !important; +} + +/* Toolbar styles */ +.mat-toolbar { + background-color: white; + color: #374151; + box-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); +} + +/* Button styles */ +.mat-mdc-button.nav-button { + width: 100%; + text-align: left; + justify-content: flex-start; + padding: 0.75rem 1rem; + border-radius: 0.375rem; + margin-bottom: 0.25rem; + background-color: transparent; + transition: background-color 0.15s ease-in-out; +} + +.mat-mdc-button.nav-button:hover { + background-color: #f3f4f6; +} + +.mat-mdc-button.nav-button.bg-blue-50 { + background-color: #eff6ff !important; + color: #1d4ed8 !important; +} + +/* Card styles */ +.mat-mdc-card { + border-radius: 0.5rem; + 
box-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); + border: 1px solid #f3f4f6; + background-color: white !important; +} + +/* Tab styles */ +.mat-mdc-tab-group .mat-mdc-tab-header { + border-bottom: 1px solid #e5e7eb; +} + +.mat-mdc-tab-label { + color: #6b7280; +} + +.mat-mdc-tab-label:hover { + color: #111827; +} + +.mat-mdc-tab-label-active { + color: #2563eb; + font-weight: 500; +} + +/* Chip styles for status indicators */ +.mat-mdc-chip-set .mat-mdc-chip { + background-color: white; + border: 1px solid #e5e7eb; +} + +.chip-green { + background-color: #dcfce7 !important; + color: #166534 !important; + border: 1px solid #bbf7d0 !important; +} + +.chip-blue { + background-color: #dbeafe !important; + color: #1e40af !important; + border: 1px solid #bfdbfe !important; +} + +.status-chip-active { + background-color: #dcfce7; + color: #166534; + padding: 0.25rem 0.5rem; + border-radius: 9999px; + font-size: 0.75rem; + font-weight: 500; + display: inline-block; +} + +.status-chip-medium { + background-color: #dbeafe; + color: #1e40af; + padding: 0.25rem 0.5rem; + border-radius: 9999px; + font-size: 0.75rem; + font-weight: 500; +} + +/* Table styles */ +.mat-mdc-table { + border-radius: 0.5rem; + overflow: hidden; + border: 1px solid #f3f4f6; + background-color: white; +} + +.mat-mdc-header-row { + background-color: #f9fafb; +} + +.mat-mdc-header-cell { + font-weight: 500; + color: #374151; + font-size: 0.875rem; +} + +.mat-mdc-cell { + color: #111827; + font-size: 0.875rem; + padding: 1rem 0; +} + +.mat-mdc-row:hover { + background-color: #f9fafb; + transition: background-color 0.15s ease; +} + +/* Custom utility classes for the dashboard */ +.portfolio-card { + background-color: white !important; + border-radius: 0.5rem; + box-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); + border: 1px solid #f3f4f6; + padding: 1.5rem; +} + +.metric-value { + font-size: 1.5rem; + font-weight: 700; +} + +.metric-label { + font-size: 0.875rem; + color: #6b7280; + margin-top: 0.25rem; +} + 
+.metric-change-positive { + color: #16a34a; + font-weight: 500; +} + +.metric-change-negative { + color: #dc2626; + font-weight: 500; +} + +/* Responsive styles */ +@media (max-width: 768px) { + .mat-sidenav { + width: 100%; + } + + .hide-mobile { + display: none; + } +} diff --git a/apps/dashboard/src/app/app.html b/apps/dashboard/src/app/app.html index 5ee5ca3..d5fd945 100644 --- a/apps/dashboard/src/app/app.html +++ b/apps/dashboard/src/app/app.html @@ -1,67 +1,67 @@ - -
- - - - -
- - - {{ title }} - - - - - - -
- -
-
-
- - + +
+ + + + +
+ + + {{ title }} + + + + + + +
+ +
+
+
+ + diff --git a/apps/dashboard/src/app/app.routes.ts b/apps/dashboard/src/app/app.routes.ts index 7f33c8f..a7c2c2e 100644 --- a/apps/dashboard/src/app/app.routes.ts +++ b/apps/dashboard/src/app/app.routes.ts @@ -1,18 +1,18 @@ -import { Routes } from '@angular/router'; -import { DashboardComponent } from './pages/dashboard/dashboard.component'; -import { MarketDataComponent } from './pages/market-data/market-data.component'; -import { PortfolioComponent } from './pages/portfolio/portfolio.component'; -import { StrategiesComponent } from './pages/strategies/strategies.component'; -import { RiskManagementComponent } from './pages/risk-management/risk-management.component'; -import { SettingsComponent } from './pages/settings/settings.component'; - -export const routes: Routes = [ - { path: '', redirectTo: '/dashboard', pathMatch: 'full' }, - { path: 'dashboard', component: DashboardComponent }, - { path: 'market-data', component: MarketDataComponent }, - { path: 'portfolio', component: PortfolioComponent }, - { path: 'strategies', component: StrategiesComponent }, - { path: 'risk-management', component: RiskManagementComponent }, - { path: 'settings', component: SettingsComponent }, - { path: '**', redirectTo: '/dashboard' } -]; +import { Routes } from '@angular/router'; +import { DashboardComponent } from './pages/dashboard/dashboard.component'; +import { MarketDataComponent } from './pages/market-data/market-data.component'; +import { PortfolioComponent } from './pages/portfolio/portfolio.component'; +import { StrategiesComponent } from './pages/strategies/strategies.component'; +import { RiskManagementComponent } from './pages/risk-management/risk-management.component'; +import { SettingsComponent } from './pages/settings/settings.component'; + +export const routes: Routes = [ + { path: '', redirectTo: '/dashboard', pathMatch: 'full' }, + { path: 'dashboard', component: DashboardComponent }, + { path: 'market-data', component: MarketDataComponent }, + { path: 
'portfolio', component: PortfolioComponent }, + { path: 'strategies', component: StrategiesComponent }, + { path: 'risk-management', component: RiskManagementComponent }, + { path: 'settings', component: SettingsComponent }, + { path: '**', redirectTo: '/dashboard' } +]; diff --git a/apps/dashboard/src/app/app.spec.ts b/apps/dashboard/src/app/app.spec.ts index b02fcd2..a46cf11 100644 --- a/apps/dashboard/src/app/app.spec.ts +++ b/apps/dashboard/src/app/app.spec.ts @@ -1,25 +1,25 @@ -import { provideZonelessChangeDetection } from '@angular/core'; -import { TestBed } from '@angular/core/testing'; -import { App } from './app'; - -describe('App', () => { - beforeEach(async () => { - await TestBed.configureTestingModule({ - imports: [App], - providers: [provideZonelessChangeDetection()] - }).compileComponents(); - }); - - it('should create the app', () => { - const fixture = TestBed.createComponent(App); - const app = fixture.componentInstance; - expect(app).toBeTruthy(); - }); - - it('should render title', () => { - const fixture = TestBed.createComponent(App); - fixture.detectChanges(); - const compiled = fixture.nativeElement as HTMLElement; - expect(compiled.querySelector('h1')?.textContent).toContain('Hello, trading-dashboard'); - }); -}); +import { provideZonelessChangeDetection } from '@angular/core'; +import { TestBed } from '@angular/core/testing'; +import { App } from './app'; + +describe('App', () => { + beforeEach(async () => { + await TestBed.configureTestingModule({ + imports: [App], + providers: [provideZonelessChangeDetection()] + }).compileComponents(); + }); + + it('should create the app', () => { + const fixture = TestBed.createComponent(App); + const app = fixture.componentInstance; + expect(app).toBeTruthy(); + }); + + it('should render title', () => { + const fixture = TestBed.createComponent(App); + fixture.detectChanges(); + const compiled = fixture.nativeElement as HTMLElement; + 
expect(compiled.querySelector('h1')?.textContent).toContain('Hello, trading-dashboard'); + }); +}); diff --git a/apps/dashboard/src/app/app.ts b/apps/dashboard/src/app/app.ts index 8e869c2..ef1aa48 100644 --- a/apps/dashboard/src/app/app.ts +++ b/apps/dashboard/src/app/app.ts @@ -1,40 +1,40 @@ -import { Component, signal } from '@angular/core'; -import { RouterOutlet } from '@angular/router'; -import { CommonModule } from '@angular/common'; -import { MatSidenavModule } from '@angular/material/sidenav'; -import { MatToolbarModule } from '@angular/material/toolbar'; -import { MatButtonModule } from '@angular/material/button'; -import { MatIconModule } from '@angular/material/icon'; -import { MatChipsModule } from '@angular/material/chips'; -import { SidebarComponent } from './components/sidebar/sidebar.component'; -import { NotificationsComponent } from './components/notifications/notifications'; - -@Component({ - selector: 'app-root', - imports: [ - RouterOutlet, - CommonModule, - MatSidenavModule, - MatToolbarModule, - MatButtonModule, - MatIconModule, - MatChipsModule, - SidebarComponent, - NotificationsComponent - ], - templateUrl: './app.html', - styleUrl: './app.css' -}) -export class App { - protected title = 'Trading Dashboard'; - protected sidenavOpened = signal(true); - - toggleSidenav() { - this.sidenavOpened.set(!this.sidenavOpened()); - } - - onNavigationClick(route: string) { - // Handle navigation if needed - console.log('Navigating to:', route); - } -} +import { Component, signal } from '@angular/core'; +import { RouterOutlet } from '@angular/router'; +import { CommonModule } from '@angular/common'; +import { MatSidenavModule } from '@angular/material/sidenav'; +import { MatToolbarModule } from '@angular/material/toolbar'; +import { MatButtonModule } from '@angular/material/button'; +import { MatIconModule } from '@angular/material/icon'; +import { MatChipsModule } from '@angular/material/chips'; +import { SidebarComponent } from 
'./components/sidebar/sidebar.component'; +import { NotificationsComponent } from './components/notifications/notifications'; + +@Component({ + selector: 'app-root', + imports: [ + RouterOutlet, + CommonModule, + MatSidenavModule, + MatToolbarModule, + MatButtonModule, + MatIconModule, + MatChipsModule, + SidebarComponent, + NotificationsComponent + ], + templateUrl: './app.html', + styleUrl: './app.css' +}) +export class App { + protected title = 'Trading Dashboard'; + protected sidenavOpened = signal(true); + + toggleSidenav() { + this.sidenavOpened.set(!this.sidenavOpened()); + } + + onNavigationClick(route: string) { + // Handle navigation if needed + console.log('Navigating to:', route); + } +} diff --git a/apps/dashboard/src/app/components/notifications/notifications.css b/apps/dashboard/src/app/components/notifications/notifications.css index c16e6e1..21e9a8a 100644 --- a/apps/dashboard/src/app/components/notifications/notifications.css +++ b/apps/dashboard/src/app/components/notifications/notifications.css @@ -1,45 +1,45 @@ -::ng-deep .notification-menu { - width: 380px; - max-width: 90vw; -} - -.notification-header { - padding: 12px 16px !important; - height: auto !important; - line-height: normal !important; -} - -.notification-empty { - padding: 16px !important; - height: auto !important; - line-height: normal !important; -} - -.notification-item { - padding: 12px 16px !important; - height: auto !important; - line-height: normal !important; - white-space: normal !important; - border-left: 3px solid transparent; - transition: all 0.2s ease; -} - -.notification-item:hover { - background-color: #f5f5f5; -} - -.notification-item.unread { - background-color: #f0f9ff; - border-left-color: #0ea5e9; -} - -.notification-item.unread .font-medium { - font-weight: 600; -} - -.line-clamp-2 { - display: -webkit-box; - -webkit-line-clamp: 2; - -webkit-box-orient: vertical; - overflow: hidden; +::ng-deep .notification-menu { + width: 380px; + max-width: 90vw; +} + 
+.notification-header { + padding: 12px 16px !important; + height: auto !important; + line-height: normal !important; +} + +.notification-empty { + padding: 16px !important; + height: auto !important; + line-height: normal !important; +} + +.notification-item { + padding: 12px 16px !important; + height: auto !important; + line-height: normal !important; + white-space: normal !important; + border-left: 3px solid transparent; + transition: all 0.2s ease; +} + +.notification-item:hover { + background-color: #f5f5f5; +} + +.notification-item.unread { + background-color: #f0f9ff; + border-left-color: #0ea5e9; +} + +.notification-item.unread .font-medium { + font-weight: 600; +} + +.line-clamp-2 { + display: -webkit-box; + -webkit-line-clamp: 2; + -webkit-box-orient: vertical; + overflow: hidden; } \ No newline at end of file diff --git a/apps/dashboard/src/app/components/notifications/notifications.html b/apps/dashboard/src/app/components/notifications/notifications.html index e85f512..11648e6 100644 --- a/apps/dashboard/src/app/components/notifications/notifications.html +++ b/apps/dashboard/src/app/components/notifications/notifications.html @@ -1,75 +1,75 @@ - - - -
-
- Notifications - @if (notifications.length > 0) { -
- - -
- } -
-
- - - - @if (notifications.length === 0) { -
-
- notifications_none -

No notifications

-
-
- } @else { - @for (notification of notifications.slice(0, 5); track notification.id) { -
-
- - {{ getNotificationIcon(notification.type) }} - -
-
-

{{ notification.title }}

- -
-

{{ notification.message }}

-

{{ formatTime(notification.timestamp) }}

-
-
-
- @if (!$last) { - - } - } - - @if (notifications.length > 5) { - -
- {{ notifications.length - 5 }} more notifications... -
- } - } -
+ + + +
+
+ Notifications + @if (notifications.length > 0) { +
+ + +
+ } +
+
+ + + + @if (notifications.length === 0) { +
+
+ notifications_none +

No notifications

+
+
+ } @else { + @for (notification of notifications.slice(0, 5); track notification.id) { +
+
+ + {{ getNotificationIcon(notification.type) }} + +
+
+

{{ notification.title }}

+ +
+

{{ notification.message }}

+

{{ formatTime(notification.timestamp) }}

+
+
+
+ @if (!$last) { + + } + } + + @if (notifications.length > 5) { + +
+ {{ notifications.length - 5 }} more notifications... +
+ } + } +
diff --git a/apps/dashboard/src/app/components/notifications/notifications.ts b/apps/dashboard/src/app/components/notifications/notifications.ts index 36ce2db..9b1270b 100644 --- a/apps/dashboard/src/app/components/notifications/notifications.ts +++ b/apps/dashboard/src/app/components/notifications/notifications.ts @@ -1,86 +1,86 @@ -import { Component, inject } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatIconModule } from '@angular/material/icon'; -import { MatButtonModule } from '@angular/material/button'; -import { MatBadgeModule } from '@angular/material/badge'; -import { MatMenuModule } from '@angular/material/menu'; -import { MatListModule } from '@angular/material/list'; -import { MatDividerModule } from '@angular/material/divider'; -import { NotificationService, Notification } from '../../services/notification.service'; - -@Component({ - selector: 'app-notifications', - imports: [ - CommonModule, - MatIconModule, - MatButtonModule, - MatBadgeModule, - MatMenuModule, - MatListModule, - MatDividerModule - ], - templateUrl: './notifications.html', - styleUrl: './notifications.css' -}) -export class NotificationsComponent { - private notificationService = inject(NotificationService); - - get notifications() { - return this.notificationService.notifications(); - } - - get unreadCount() { - return this.notificationService.unreadCount(); - } - - markAsRead(notification: Notification) { - this.notificationService.markAsRead(notification.id); - } - - markAllAsRead() { - this.notificationService.markAllAsRead(); - } - - clearNotification(notification: Notification) { - this.notificationService.clearNotification(notification.id); - } - - clearAll() { - this.notificationService.clearAllNotifications(); - } - - getNotificationIcon(type: string): string { - switch (type) { - case 'error': return 'error'; - case 'warning': return 'warning'; - case 'success': return 'check_circle'; - case 'info': - default: return 'info'; - } - } - - 
getNotificationColor(type: string): string { - switch (type) { - case 'error': return 'text-red-600'; - case 'warning': return 'text-yellow-600'; - case 'success': return 'text-green-600'; - case 'info': - default: return 'text-blue-600'; - } - } - - formatTime(timestamp: Date): string { - const now = new Date(); - const diff = now.getTime() - timestamp.getTime(); - const minutes = Math.floor(diff / 60000); - - if (minutes < 1) return 'Just now'; - if (minutes < 60) return `${minutes}m ago`; - - const hours = Math.floor(minutes / 60); - if (hours < 24) return `${hours}h ago`; - - const days = Math.floor(hours / 24); - return `${days}d ago`; - } -} +import { Component, inject } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatIconModule } from '@angular/material/icon'; +import { MatButtonModule } from '@angular/material/button'; +import { MatBadgeModule } from '@angular/material/badge'; +import { MatMenuModule } from '@angular/material/menu'; +import { MatListModule } from '@angular/material/list'; +import { MatDividerModule } from '@angular/material/divider'; +import { NotificationService, Notification } from '../../services/notification.service'; + +@Component({ + selector: 'app-notifications', + imports: [ + CommonModule, + MatIconModule, + MatButtonModule, + MatBadgeModule, + MatMenuModule, + MatListModule, + MatDividerModule + ], + templateUrl: './notifications.html', + styleUrl: './notifications.css' +}) +export class NotificationsComponent { + private notificationService = inject(NotificationService); + + get notifications() { + return this.notificationService.notifications(); + } + + get unreadCount() { + return this.notificationService.unreadCount(); + } + + markAsRead(notification: Notification) { + this.notificationService.markAsRead(notification.id); + } + + markAllAsRead() { + this.notificationService.markAllAsRead(); + } + + clearNotification(notification: Notification) { + 
this.notificationService.clearNotification(notification.id); + } + + clearAll() { + this.notificationService.clearAllNotifications(); + } + + getNotificationIcon(type: string): string { + switch (type) { + case 'error': return 'error'; + case 'warning': return 'warning'; + case 'success': return 'check_circle'; + case 'info': + default: return 'info'; + } + } + + getNotificationColor(type: string): string { + switch (type) { + case 'error': return 'text-red-600'; + case 'warning': return 'text-yellow-600'; + case 'success': return 'text-green-600'; + case 'info': + default: return 'text-blue-600'; + } + } + + formatTime(timestamp: Date): string { + const now = new Date(); + const diff = now.getTime() - timestamp.getTime(); + const minutes = Math.floor(diff / 60000); + + if (minutes < 1) return 'Just now'; + if (minutes < 60) return `${minutes}m ago`; + + const hours = Math.floor(minutes / 60); + if (hours < 24) return `${hours}h ago`; + + const days = Math.floor(hours / 24); + return `${days}d ago`; + } +} diff --git a/apps/dashboard/src/app/components/sidebar/sidebar.component.css b/apps/dashboard/src/app/components/sidebar/sidebar.component.css index 74e52cf..7a4c953 100644 --- a/apps/dashboard/src/app/components/sidebar/sidebar.component.css +++ b/apps/dashboard/src/app/components/sidebar/sidebar.component.css @@ -1,38 +1,38 @@ -/* Sidebar specific styles */ -.sidebar { - position: fixed; - top: 0; - left: 0; - width: 16rem; /* 256px */ - height: 100vh; - background-color: white; - border-right: 1px solid #e5e7eb; - transform: translateX(0); - transition: transform 0.3s ease-in-out; - z-index: 1000; - overflow-y: auto; -} - -.sidebar-closed { - transform: translateX(-100%); -} - -.nav-button { - width: 100%; - text-align: left; - justify-content: flex-start; - padding: 0.75rem 1rem; - border-radius: 0.375rem; - margin-bottom: 0.25rem; - background-color: transparent; - transition: background-color 0.15s ease-in-out; -} - -.nav-button:hover { - background-color: 
#f3f4f6; -} - -.nav-button.bg-blue-50 { - background-color: #eff6ff !important; - color: #1d4ed8 !important; -} +/* Sidebar specific styles */ +.sidebar { + position: fixed; + top: 0; + left: 0; + width: 16rem; /* 256px */ + height: 100vh; + background-color: white; + border-right: 1px solid #e5e7eb; + transform: translateX(0); + transition: transform 0.3s ease-in-out; + z-index: 1000; + overflow-y: auto; +} + +.sidebar-closed { + transform: translateX(-100%); +} + +.nav-button { + width: 100%; + text-align: left; + justify-content: flex-start; + padding: 0.75rem 1rem; + border-radius: 0.375rem; + margin-bottom: 0.25rem; + background-color: transparent; + transition: background-color 0.15s ease-in-out; +} + +.nav-button:hover { + background-color: #f3f4f6; +} + +.nav-button.bg-blue-50 { + background-color: #eff6ff !important; + color: #1d4ed8 !important; +} diff --git a/apps/dashboard/src/app/components/sidebar/sidebar.component.html b/apps/dashboard/src/app/components/sidebar/sidebar.component.html index bd25310..ea6bfae 100644 --- a/apps/dashboard/src/app/components/sidebar/sidebar.component.html +++ b/apps/dashboard/src/app/components/sidebar/sidebar.component.html @@ -1,30 +1,30 @@ - - + + diff --git a/apps/dashboard/src/app/components/sidebar/sidebar.component.ts b/apps/dashboard/src/app/components/sidebar/sidebar.component.ts index 3ae3211..8000923 100644 --- a/apps/dashboard/src/app/components/sidebar/sidebar.component.ts +++ b/apps/dashboard/src/app/components/sidebar/sidebar.component.ts @@ -1,61 +1,61 @@ -import { Component, input, output } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatSidenavModule } from '@angular/material/sidenav'; -import { MatButtonModule } from '@angular/material/button'; -import { MatIconModule } from '@angular/material/icon'; -import { Router, NavigationEnd } from '@angular/router'; -import { filter } from 'rxjs/operators'; - -export interface NavigationItem { - label: string; - icon: string; 
- route: string; - active?: boolean; -} - -@Component({ - selector: 'app-sidebar', - standalone: true, - imports: [ - CommonModule, - MatSidenavModule, - MatButtonModule, - MatIconModule - ], - templateUrl: './sidebar.component.html', - styleUrl: './sidebar.component.css' -}) -export class SidebarComponent { - opened = input(true); - navigationItemClick = output(); - - protected navigationItems: NavigationItem[] = [ - { label: 'Dashboard', icon: 'dashboard', route: '/dashboard', active: true }, - { label: 'Market Data', icon: 'trending_up', route: '/market-data' }, - { label: 'Portfolio', icon: 'account_balance_wallet', route: '/portfolio' }, - { label: 'Strategies', icon: 'psychology', route: '/strategies' }, - { label: 'Risk Management', icon: 'security', route: '/risk-management' }, - { label: 'Settings', icon: 'settings', route: '/settings' } - ]; - - constructor(private router: Router) { - // Listen to route changes to update active state - this.router.events.pipe( - filter(event => event instanceof NavigationEnd) - ).subscribe((event: NavigationEnd) => { - this.updateActiveRoute(event.urlAfterRedirects); - }); - } - - onNavigationClick(route: string) { - this.navigationItemClick.emit(route); - this.router.navigate([route]); - this.updateActiveRoute(route); - } - - private updateActiveRoute(currentRoute: string) { - this.navigationItems.forEach(item => { - item.active = item.route === currentRoute; - }); - } -} +import { Component, input, output } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatSidenavModule } from '@angular/material/sidenav'; +import { MatButtonModule } from '@angular/material/button'; +import { MatIconModule } from '@angular/material/icon'; +import { Router, NavigationEnd } from '@angular/router'; +import { filter } from 'rxjs/operators'; + +export interface NavigationItem { + label: string; + icon: string; + route: string; + active?: boolean; +} + +@Component({ + selector: 'app-sidebar', + standalone: 
true, + imports: [ + CommonModule, + MatSidenavModule, + MatButtonModule, + MatIconModule + ], + templateUrl: './sidebar.component.html', + styleUrl: './sidebar.component.css' +}) +export class SidebarComponent { + opened = input(true); + navigationItemClick = output(); + + protected navigationItems: NavigationItem[] = [ + { label: 'Dashboard', icon: 'dashboard', route: '/dashboard', active: true }, + { label: 'Market Data', icon: 'trending_up', route: '/market-data' }, + { label: 'Portfolio', icon: 'account_balance_wallet', route: '/portfolio' }, + { label: 'Strategies', icon: 'psychology', route: '/strategies' }, + { label: 'Risk Management', icon: 'security', route: '/risk-management' }, + { label: 'Settings', icon: 'settings', route: '/settings' } + ]; + + constructor(private router: Router) { + // Listen to route changes to update active state + this.router.events.pipe( + filter(event => event instanceof NavigationEnd) + ).subscribe((event: NavigationEnd) => { + this.updateActiveRoute(event.urlAfterRedirects); + }); + } + + onNavigationClick(route: string) { + this.navigationItemClick.emit(route); + this.router.navigate([route]); + this.updateActiveRoute(route); + } + + private updateActiveRoute(currentRoute: string) { + this.navigationItems.forEach(item => { + item.active = item.route === currentRoute; + }); + } +} diff --git a/apps/dashboard/src/app/pages/dashboard/dashboard.component.css b/apps/dashboard/src/app/pages/dashboard/dashboard.component.css index 714e6ba..084987e 100644 --- a/apps/dashboard/src/app/pages/dashboard/dashboard.component.css +++ b/apps/dashboard/src/app/pages/dashboard/dashboard.component.css @@ -1,48 +1,48 @@ -/* Dashboard specific styles */ -.portfolio-card { - background-color: white !important; - border-radius: 0.5rem; - box-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); - border: 1px solid #f3f4f6; - padding: 1.5rem; -} - -.metric-value { - font-size: 1.5rem; - font-weight: 700; -} - -.metric-label { - font-size: 0.875rem; - color: 
#6b7280; - margin-top: 0.25rem; -} - -.metric-change-positive { - color: #16a34a; - font-weight: 500; -} - -.metric-change-negative { - color: #dc2626; - font-weight: 500; -} - -.status-chip-active { - background-color: #dcfce7; - color: #166534; - padding: 0.25rem 0.5rem; - border-radius: 9999px; - font-size: 0.75rem; - font-weight: 500; - display: inline-block; -} - -.status-chip-medium { - background-color: #dbeafe; - color: #1e40af; - padding: 0.25rem 0.5rem; - border-radius: 9999px; - font-size: 0.75rem; - font-weight: 500; -} +/* Dashboard specific styles */ +.portfolio-card { + background-color: white !important; + border-radius: 0.5rem; + box-shadow: 0 1px 2px 0 rgb(0 0 0 / 0.05); + border: 1px solid #f3f4f6; + padding: 1.5rem; +} + +.metric-value { + font-size: 1.5rem; + font-weight: 700; +} + +.metric-label { + font-size: 0.875rem; + color: #6b7280; + margin-top: 0.25rem; +} + +.metric-change-positive { + color: #16a34a; + font-weight: 500; +} + +.metric-change-negative { + color: #dc2626; + font-weight: 500; +} + +.status-chip-active { + background-color: #dcfce7; + color: #166534; + padding: 0.25rem 0.5rem; + border-radius: 9999px; + font-size: 0.75rem; + font-weight: 500; + display: inline-block; +} + +.status-chip-medium { + background-color: #dbeafe; + color: #1e40af; + padding: 0.25rem 0.5rem; + border-radius: 9999px; + font-size: 0.75rem; + font-weight: 500; +} diff --git a/apps/dashboard/src/app/pages/dashboard/dashboard.component.html b/apps/dashboard/src/app/pages/dashboard/dashboard.component.html index 5a5d329..d21a7f6 100644 --- a/apps/dashboard/src/app/pages/dashboard/dashboard.component.html +++ b/apps/dashboard/src/app/pages/dashboard/dashboard.component.html @@ -1,154 +1,154 @@ - -
- - -
-
-

Portfolio Value

-

- ${{ portfolioValue().toLocaleString('en-US', {minimumFractionDigits: 2}) }} -

-
- account_balance_wallet -
-
- - - -
-
-

Day Change

-

- ${{ dayChange().toLocaleString('en-US', {minimumFractionDigits: 2}) }} -

-

- {{ dayChangePercent() > 0 ? '+' : '' }}{{ dayChangePercent().toFixed(2) }}% -

-
- - {{ dayChange() > 0 ? 'trending_up' : 'trending_down' }} - -
-
- - - -
-
-

Active Strategies

-

3

- Running -
- psychology -
-
- - - -
-
-

Risk Level

-

Low

- Moderate -
- security -
-
-
- - - -
-

Market Watchlist

- -
- -
- - - - - - - - - - - - - - - - - - - - - - - - - - - -
Symbol{{ stock.symbol }}Price - ${{ stock.price.toFixed(2) }} - Change - {{ stock.change > 0 ? '+' : '' }}${{ stock.change.toFixed(2) }} - Change % - {{ stock.changePercent > 0 ? '+' : '' }}{{ stock.changePercent.toFixed(2) }}% -
-
-
- - - - -
- -
- show_chart -

Chart visualization will be implemented here

-
-
-
-
- - -
- -
- receipt_long -

Order history and management will be implemented here

-
-
-
-
- - -
- -
- analytics -

Advanced analytics and performance metrics will be implemented here

-
-
-
-
-
+ +
+ + +
+
+

Portfolio Value

+

+ ${{ portfolioValue().toLocaleString('en-US', {minimumFractionDigits: 2}) }} +

+
+ account_balance_wallet +
+
+ + + +
+
+

Day Change

+

+ ${{ dayChange().toLocaleString('en-US', {minimumFractionDigits: 2}) }} +

+

+ {{ dayChangePercent() > 0 ? '+' : '' }}{{ dayChangePercent().toFixed(2) }}% +

+
+ + {{ dayChange() > 0 ? 'trending_up' : 'trending_down' }} + +
+
+ + + +
+
+

Active Strategies

+

3

+ Running +
+ psychology +
+
+ + + +
+
+

Risk Level

+

Low

+ Moderate +
+ security +
+
+
+ + + +
+

Market Watchlist

+ +
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
Symbol{{ stock.symbol }}Price + ${{ stock.price.toFixed(2) }} + Change + {{ stock.change > 0 ? '+' : '' }}${{ stock.change.toFixed(2) }} + Change % + {{ stock.changePercent > 0 ? '+' : '' }}{{ stock.changePercent.toFixed(2) }}% +
+
+
+ + + + +
+ +
+ show_chart +

Chart visualization will be implemented here

+
+
+
+
+ + +
+ +
+ receipt_long +

Order history and management will be implemented here

+
+
+
+
+ + +
+ +
+ analytics +

Advanced analytics and performance metrics will be implemented here

+
+
+
+
+
diff --git a/apps/dashboard/src/app/pages/dashboard/dashboard.component.ts b/apps/dashboard/src/app/pages/dashboard/dashboard.component.ts index 41044bc..fa02a2a 100644 --- a/apps/dashboard/src/app/pages/dashboard/dashboard.component.ts +++ b/apps/dashboard/src/app/pages/dashboard/dashboard.component.ts @@ -1,44 +1,44 @@ -import { Component, signal } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatCardModule } from '@angular/material/card'; -import { MatTabsModule } from '@angular/material/tabs'; -import { MatButtonModule } from '@angular/material/button'; -import { MatIconModule } from '@angular/material/icon'; -import { MatTableModule } from '@angular/material/table'; - -export interface MarketDataItem { - symbol: string; - price: number; - change: number; - changePercent: number; -} - -@Component({ - selector: 'app-dashboard', - standalone: true, - imports: [ - CommonModule, - MatCardModule, - MatTabsModule, - MatButtonModule, - MatIconModule, - MatTableModule - ], - templateUrl: './dashboard.component.html', - styleUrl: './dashboard.component.css' -}) -export class DashboardComponent { - // Mock data for the dashboard - protected marketData = signal([ - { symbol: 'AAPL', price: 192.53, change: 2.41, changePercent: 1.27 }, - { symbol: 'GOOGL', price: 138.21, change: -1.82, changePercent: -1.30 }, - { symbol: 'MSFT', price: 378.85, change: 4.12, changePercent: 1.10 }, - { symbol: 'TSLA', price: 248.42, change: -3.21, changePercent: -1.28 }, - ]); - - protected portfolioValue = signal(125420.50); - protected dayChange = signal(2341.20); - protected dayChangePercent = signal(1.90); - - protected displayedColumns: string[] = ['symbol', 'price', 'change', 'changePercent']; -} +import { Component, signal } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatCardModule } from '@angular/material/card'; +import { MatTabsModule } from '@angular/material/tabs'; +import { MatButtonModule } from 
'@angular/material/button'; +import { MatIconModule } from '@angular/material/icon'; +import { MatTableModule } from '@angular/material/table'; + +export interface MarketDataItem { + symbol: string; + price: number; + change: number; + changePercent: number; +} + +@Component({ + selector: 'app-dashboard', + standalone: true, + imports: [ + CommonModule, + MatCardModule, + MatTabsModule, + MatButtonModule, + MatIconModule, + MatTableModule + ], + templateUrl: './dashboard.component.html', + styleUrl: './dashboard.component.css' +}) +export class DashboardComponent { + // Mock data for the dashboard + protected marketData = signal([ + { symbol: 'AAPL', price: 192.53, change: 2.41, changePercent: 1.27 }, + { symbol: 'GOOGL', price: 138.21, change: -1.82, changePercent: -1.30 }, + { symbol: 'MSFT', price: 378.85, change: 4.12, changePercent: 1.10 }, + { symbol: 'TSLA', price: 248.42, change: -3.21, changePercent: -1.28 }, + ]); + + protected portfolioValue = signal(125420.50); + protected dayChange = signal(2341.20); + protected dayChangePercent = signal(1.90); + + protected displayedColumns: string[] = ['symbol', 'price', 'change', 'changePercent']; +} diff --git a/apps/dashboard/src/app/pages/market-data/market-data.component.css b/apps/dashboard/src/app/pages/market-data/market-data.component.css index 5fd9b2e..3330a6c 100644 --- a/apps/dashboard/src/app/pages/market-data/market-data.component.css +++ b/apps/dashboard/src/app/pages/market-data/market-data.component.css @@ -1 +1 @@ -/* Market Data specific styles */ +/* Market Data specific styles */ diff --git a/apps/dashboard/src/app/pages/market-data/market-data.component.html b/apps/dashboard/src/app/pages/market-data/market-data.component.html index 9b35668..c8e1320 100644 --- a/apps/dashboard/src/app/pages/market-data/market-data.component.html +++ b/apps/dashboard/src/app/pages/market-data/market-data.component.html @@ -1,172 +1,172 @@ -
-
-
-

Market Data

-

Real-time market information and analytics

-
- -
- - -
- -
-
-

Market Status

-

Open

-
- schedule -
-
- - -
-
-

Active Instruments

-

{{ marketData().length }}

-
- trending_up -
-
- - -
-

Last Update

-

{{ currentTime() }}

-
- access_time -
-
-
- - - -
-

Live Market Data

-
- - -
-
- @if (isLoading()) { -
- - Loading market data... -
- } @else if (error()) { -
- error -

{{ error() }}

- -
- } @else { -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Symbol{{ stock.symbol }}Price - ${{ stock.price.toFixed(2) }} - Change - {{ stock.change > 0 ? '+' : '' }}${{ stock.change.toFixed(2) }} - Change % - {{ stock.changePercent > 0 ? '+' : '' }}{{ stock.changePercent.toFixed(2) }}% - Volume - {{ stock.volume.toLocaleString() }} - Market Cap - ${{ stock.marketCap }} -
-
- } -
- - - - -
- -
- bar_chart -

Technical analysis charts and indicators will be implemented here

-
-
-
-
- - -
- -
- timeline -

Market trends and sector analysis will be implemented here

-
-
-
-
- - -
- -
- article -

Market news and economic events will be implemented here

-
-
-
-
-
-
+
+
+
+

Market Data

+

Real-time market information and analytics

+
+ +
+ + +
+ +
+
+

Market Status

+

Open

+
+ schedule +
+
+ + +
+
+

Active Instruments

+

{{ marketData().length }}

+
+ trending_up +
+
+ + +
+

Last Update

+

{{ currentTime() }}

+
+ access_time +
+
+
+ + + +
+

Live Market Data

+
+ + +
+
+ @if (isLoading()) { +
+ + Loading market data... +
+ } @else if (error()) { +
+ error +

{{ error() }}

+ +
+ } @else { +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Symbol{{ stock.symbol }}Price + ${{ stock.price.toFixed(2) }} + Change + {{ stock.change > 0 ? '+' : '' }}${{ stock.change.toFixed(2) }} + Change % + {{ stock.changePercent > 0 ? '+' : '' }}{{ stock.changePercent.toFixed(2) }}% + Volume + {{ stock.volume.toLocaleString() }} + Market Cap + ${{ stock.marketCap }} +
+
+ } +
+ + + + +
+ +
+ bar_chart +

Technical analysis charts and indicators will be implemented here

+
+
+
+
+ + +
+ +
+ timeline +

Market trends and sector analysis will be implemented here

+
+
+
+
+ + +
+ +
+ article +

Market news and economic events will be implemented here

+
+
+
+
+
+
diff --git a/apps/dashboard/src/app/pages/market-data/market-data.component.ts b/apps/dashboard/src/app/pages/market-data/market-data.component.ts index e4c8ee2..1ec1227 100644 --- a/apps/dashboard/src/app/pages/market-data/market-data.component.ts +++ b/apps/dashboard/src/app/pages/market-data/market-data.component.ts @@ -1,198 +1,198 @@ -import { Component, signal, OnInit, OnDestroy, inject } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatCardModule } from '@angular/material/card'; -import { MatButtonModule } from '@angular/material/button'; -import { MatIconModule } from '@angular/material/icon'; -import { MatTableModule } from '@angular/material/table'; -import { MatTabsModule } from '@angular/material/tabs'; -import { MatProgressSpinnerModule } from '@angular/material/progress-spinner'; -import { MatSnackBarModule, MatSnackBar } from '@angular/material/snack-bar'; -import { ApiService } from '../../services/api.service'; -import { WebSocketService } from '../../services/websocket.service'; -import { interval, Subscription } from 'rxjs'; - -export interface ExtendedMarketData { - symbol: string; - price: number; - change: number; - changePercent: number; - volume: number; - marketCap: string; - high52Week: number; - low52Week: number; -} - -@Component({ - selector: 'app-market-data', - standalone: true, - imports: [ - CommonModule, - MatCardModule, - MatButtonModule, - MatIconModule, - MatTableModule, - MatTabsModule, - MatProgressSpinnerModule, - MatSnackBarModule - ], - templateUrl: './market-data.component.html', - styleUrl: './market-data.component.css' -}) -export class MarketDataComponent implements OnInit, OnDestroy { - private apiService = inject(ApiService); - private webSocketService = inject(WebSocketService); - private snackBar = inject(MatSnackBar); - private subscriptions: Subscription[] = []; - - protected marketData = signal([]); - protected currentTime = signal(new Date().toLocaleTimeString()); - protected 
isLoading = signal(true); - protected error = signal(null); - protected displayedColumns: string[] = ['symbol', 'price', 'change', 'changePercent', 'volume', 'marketCap']; - ngOnInit() { - // Update time every second - const timeSubscription = interval(1000).subscribe(() => { - this.currentTime.set(new Date().toLocaleTimeString()); - }); - this.subscriptions.push(timeSubscription); - - // Load initial market data - this.loadMarketData(); - - // Subscribe to real-time market data updates - const wsSubscription = this.webSocketService.getMarketDataUpdates().subscribe({ - next: (update) => { - this.updateMarketData(update); - }, - error: (err) => { - console.error('WebSocket market data error:', err); - } - }); - this.subscriptions.push(wsSubscription); - - // Fallback: Refresh market data every 30 seconds if WebSocket fails - const dataSubscription = interval(30000).subscribe(() => { - if (!this.webSocketService.isConnected()) { - this.loadMarketData(); - } - }); - this.subscriptions.push(dataSubscription); - } - - ngOnDestroy() { - this.subscriptions.forEach(sub => sub.unsubscribe()); - } - private loadMarketData() { - this.apiService.getMarketData().subscribe({ - next: (response) => { - // Convert MarketData to ExtendedMarketData with mock extended properties - const extendedData: ExtendedMarketData[] = response.data.map(item => ({ - ...item, - marketCap: this.getMockMarketCap(item.symbol), - high52Week: item.price * 1.3, // Mock 52-week high (30% above current) - low52Week: item.price * 0.7 // Mock 52-week low (30% below current) - })); - - this.marketData.set(extendedData); - this.isLoading.set(false); - this.error.set(null); - }, - error: (err) => { - console.error('Failed to load market data:', err); - this.error.set('Failed to load market data'); - this.isLoading.set(false); - this.snackBar.open('Failed to load market data', 'Dismiss', { duration: 5000 }); - - // Use mock data as fallback - this.marketData.set(this.getMockData()); - } - }); - } - - private 
getMockMarketCap(symbol: string): string { - const marketCaps: { [key: string]: string } = { - 'AAPL': '2.98T', - 'GOOGL': '1.78T', - 'MSFT': '3.08T', - 'TSLA': '789.2B', - 'AMZN': '1.59T' - }; - return marketCaps[symbol] || '1.00T'; - } - - private getMockData(): ExtendedMarketData[] { - return [ - { - symbol: 'AAPL', - price: 192.53, - change: 2.41, - changePercent: 1.27, - volume: 45230000, - marketCap: '2.98T', - high52Week: 199.62, - low52Week: 164.08 - }, - { - symbol: 'GOOGL', - price: 2847.56, - change: -12.34, - changePercent: -0.43, - volume: 12450000, - marketCap: '1.78T', - high52Week: 3030.93, - low52Week: 2193.62 - }, - { - symbol: 'MSFT', - price: 415.26, - change: 8.73, - changePercent: 2.15, - volume: 23180000, - marketCap: '3.08T', - high52Week: 468.35, - low52Week: 309.45 - }, - { - symbol: 'TSLA', - price: 248.50, - change: -5.21, - changePercent: -2.05, - volume: 89760000, - marketCap: '789.2B', - high52Week: 299.29, - low52Week: 152.37 - }, - { - symbol: 'AMZN', - price: 152.74, - change: 3.18, - changePercent: 2.12, - volume: 34520000, - marketCap: '1.59T', - high52Week: 170.17, - low52Week: 118.35 - } - ]; - } - refreshData() { - this.isLoading.set(true); - this.loadMarketData(); - } - - private updateMarketData(update: any) { - const currentData = this.marketData(); - const updatedData = currentData.map(item => { - if (item.symbol === update.symbol) { - return { - ...item, - price: update.price, - change: update.change, - changePercent: update.changePercent, - volume: update.volume - }; - } - return item; - }); - this.marketData.set(updatedData); - } -} +import { Component, signal, OnInit, OnDestroy, inject } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatCardModule } from '@angular/material/card'; +import { MatButtonModule } from '@angular/material/button'; +import { MatIconModule } from '@angular/material/icon'; +import { MatTableModule } from '@angular/material/table'; +import { MatTabsModule } from 
'@angular/material/tabs'; +import { MatProgressSpinnerModule } from '@angular/material/progress-spinner'; +import { MatSnackBarModule, MatSnackBar } from '@angular/material/snack-bar'; +import { ApiService } from '../../services/api.service'; +import { WebSocketService } from '../../services/websocket.service'; +import { interval, Subscription } from 'rxjs'; + +export interface ExtendedMarketData { + symbol: string; + price: number; + change: number; + changePercent: number; + volume: number; + marketCap: string; + high52Week: number; + low52Week: number; +} + +@Component({ + selector: 'app-market-data', + standalone: true, + imports: [ + CommonModule, + MatCardModule, + MatButtonModule, + MatIconModule, + MatTableModule, + MatTabsModule, + MatProgressSpinnerModule, + MatSnackBarModule + ], + templateUrl: './market-data.component.html', + styleUrl: './market-data.component.css' +}) +export class MarketDataComponent implements OnInit, OnDestroy { + private apiService = inject(ApiService); + private webSocketService = inject(WebSocketService); + private snackBar = inject(MatSnackBar); + private subscriptions: Subscription[] = []; + + protected marketData = signal([]); + protected currentTime = signal(new Date().toLocaleTimeString()); + protected isLoading = signal(true); + protected error = signal(null); + protected displayedColumns: string[] = ['symbol', 'price', 'change', 'changePercent', 'volume', 'marketCap']; + ngOnInit() { + // Update time every second + const timeSubscription = interval(1000).subscribe(() => { + this.currentTime.set(new Date().toLocaleTimeString()); + }); + this.subscriptions.push(timeSubscription); + + // Load initial market data + this.loadMarketData(); + + // Subscribe to real-time market data updates + const wsSubscription = this.webSocketService.getMarketDataUpdates().subscribe({ + next: (update) => { + this.updateMarketData(update); + }, + error: (err) => { + console.error('WebSocket market data error:', err); + } + }); + 
this.subscriptions.push(wsSubscription); + + // Fallback: Refresh market data every 30 seconds if WebSocket fails + const dataSubscription = interval(30000).subscribe(() => { + if (!this.webSocketService.isConnected()) { + this.loadMarketData(); + } + }); + this.subscriptions.push(dataSubscription); + } + + ngOnDestroy() { + this.subscriptions.forEach(sub => sub.unsubscribe()); + } + private loadMarketData() { + this.apiService.getMarketData().subscribe({ + next: (response) => { + // Convert MarketData to ExtendedMarketData with mock extended properties + const extendedData: ExtendedMarketData[] = response.data.map(item => ({ + ...item, + marketCap: this.getMockMarketCap(item.symbol), + high52Week: item.price * 1.3, // Mock 52-week high (30% above current) + low52Week: item.price * 0.7 // Mock 52-week low (30% below current) + })); + + this.marketData.set(extendedData); + this.isLoading.set(false); + this.error.set(null); + }, + error: (err) => { + console.error('Failed to load market data:', err); + this.error.set('Failed to load market data'); + this.isLoading.set(false); + this.snackBar.open('Failed to load market data', 'Dismiss', { duration: 5000 }); + + // Use mock data as fallback + this.marketData.set(this.getMockData()); + } + }); + } + + private getMockMarketCap(symbol: string): string { + const marketCaps: { [key: string]: string } = { + 'AAPL': '2.98T', + 'GOOGL': '1.78T', + 'MSFT': '3.08T', + 'TSLA': '789.2B', + 'AMZN': '1.59T' + }; + return marketCaps[symbol] || '1.00T'; + } + + private getMockData(): ExtendedMarketData[] { + return [ + { + symbol: 'AAPL', + price: 192.53, + change: 2.41, + changePercent: 1.27, + volume: 45230000, + marketCap: '2.98T', + high52Week: 199.62, + low52Week: 164.08 + }, + { + symbol: 'GOOGL', + price: 2847.56, + change: -12.34, + changePercent: -0.43, + volume: 12450000, + marketCap: '1.78T', + high52Week: 3030.93, + low52Week: 2193.62 + }, + { + symbol: 'MSFT', + price: 415.26, + change: 8.73, + changePercent: 2.15, + 
volume: 23180000, + marketCap: '3.08T', + high52Week: 468.35, + low52Week: 309.45 + }, + { + symbol: 'TSLA', + price: 248.50, + change: -5.21, + changePercent: -2.05, + volume: 89760000, + marketCap: '789.2B', + high52Week: 299.29, + low52Week: 152.37 + }, + { + symbol: 'AMZN', + price: 152.74, + change: 3.18, + changePercent: 2.12, + volume: 34520000, + marketCap: '1.59T', + high52Week: 170.17, + low52Week: 118.35 + } + ]; + } + refreshData() { + this.isLoading.set(true); + this.loadMarketData(); + } + + private updateMarketData(update: any) { + const currentData = this.marketData(); + const updatedData = currentData.map(item => { + if (item.symbol === update.symbol) { + return { + ...item, + price: update.price, + change: update.change, + changePercent: update.changePercent, + volume: update.volume + }; + } + return item; + }); + this.marketData.set(updatedData); + } +} diff --git a/apps/dashboard/src/app/pages/portfolio/portfolio.component.css b/apps/dashboard/src/app/pages/portfolio/portfolio.component.css index 307b5f8..b74a600 100644 --- a/apps/dashboard/src/app/pages/portfolio/portfolio.component.css +++ b/apps/dashboard/src/app/pages/portfolio/portfolio.component.css @@ -1 +1 @@ -/* Portfolio specific styles */ +/* Portfolio specific styles */ diff --git a/apps/dashboard/src/app/pages/portfolio/portfolio.component.html b/apps/dashboard/src/app/pages/portfolio/portfolio.component.html index f62d8ac..91c65e8 100644 --- a/apps/dashboard/src/app/pages/portfolio/portfolio.component.html +++ b/apps/dashboard/src/app/pages/portfolio/portfolio.component.html @@ -1,203 +1,203 @@ -
- -
-
-

Portfolio

-

Manage and monitor your investment portfolio

-
- -
- - -
- -
-
-

Total Value

-

${{ portfolioSummary().totalValue.toLocaleString() }}

-
- account_balance_wallet -
-
- - -
-
-

Total P&L

-

- {{ portfolioSummary().totalPnL > 0 ? '+' : '' }}${{ portfolioSummary().totalPnL.toLocaleString() }} - ({{ portfolioSummary().totalPnLPercent.toFixed(2) }}%) -

-
- trending_up - trending_down -
-
- - -
-
-

Day Change

-

- {{ portfolioSummary().dayChange > 0 ? '+' : '' }}${{ portfolioSummary().dayChange.toLocaleString() }} - ({{ portfolioSummary().dayChangePercent.toFixed(2) }}%) -

-
- today -
-
- - -
-
-

Cash Available

-

${{ portfolioSummary().cash.toLocaleString() }}

-
- attach_money -
-
-
- - - - -
- -
-

Current Positions

-
- - -
-
- - @if (isLoading()) { -
- - Loading portfolio... -
- } @else if (error()) { -
- error -

{{ error() }}

- -
- } @else if (positions().length === 0) { -
- account_balance_wallet -

No positions found

- -
- } @else { -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Symbol{{ position.symbol }}Quantity - {{ position.quantity.toLocaleString() }} - Avg Price - ${{ position.avgPrice.toFixed(2) }} - Current Price - ${{ position.currentPrice.toFixed(2) }} - Market Value - ${{ position.marketValue.toLocaleString() }} - Unrealized P&L - {{ position.unrealizedPnL > 0 ? '+' : '' }}${{ position.unrealizedPnL.toLocaleString() }} - ({{ position.unrealizedPnLPercent.toFixed(2) }}%) - Day Change - {{ position.dayChange > 0 ? '+' : '' }}${{ position.dayChange.toFixed(2) }} - ({{ position.dayChangePercent.toFixed(2) }}%) -
-
- } -
-
-
- - -
- -
- trending_up -

Performance charts and analytics will be implemented here

-
-
-
-
- - -
- -
- receipt -

Order history and management will be implemented here

-
-
-
-
-
-
+
+ +
+
+

Portfolio

+

Manage and monitor your investment portfolio

+
+ +
+ + +
+ +
+
+

Total Value

+

${{ portfolioSummary().totalValue.toLocaleString() }}

+
+ account_balance_wallet +
+
+ + +
+
+

Total P&L

+

+ {{ portfolioSummary().totalPnL > 0 ? '+' : '' }}${{ portfolioSummary().totalPnL.toLocaleString() }} + ({{ portfolioSummary().totalPnLPercent.toFixed(2) }}%) +

+
+ trending_up + trending_down +
+
+ + +
+
+

Day Change

+

+ {{ portfolioSummary().dayChange > 0 ? '+' : '' }}${{ portfolioSummary().dayChange.toLocaleString() }} + ({{ portfolioSummary().dayChangePercent.toFixed(2) }}%) +

+
+ today +
+
+ + +
+
+

Cash Available

+

${{ portfolioSummary().cash.toLocaleString() }}

+
+ attach_money +
+
+
+ + + + +
+ +
+

Current Positions

+
+ + +
+
+ + @if (isLoading()) { +
+ + Loading portfolio... +
+ } @else if (error()) { +
+ error +

{{ error() }}

+ +
+ } @else if (positions().length === 0) { +
+ account_balance_wallet +

No positions found

+ +
+ } @else { +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Symbol{{ position.symbol }}Quantity + {{ position.quantity.toLocaleString() }} + Avg Price + ${{ position.avgPrice.toFixed(2) }} + Current Price + ${{ position.currentPrice.toFixed(2) }} + Market Value + ${{ position.marketValue.toLocaleString() }} + Unrealized P&L + {{ position.unrealizedPnL > 0 ? '+' : '' }}${{ position.unrealizedPnL.toLocaleString() }} + ({{ position.unrealizedPnLPercent.toFixed(2) }}%) + Day Change + {{ position.dayChange > 0 ? '+' : '' }}${{ position.dayChange.toFixed(2) }} + ({{ position.dayChangePercent.toFixed(2) }}%) +
+
+ } +
+
+
+ + +
+ +
+ trending_up +

Performance charts and analytics will be implemented here

+
+
+
+
+ + +
+ +
+ receipt +

Order history and management will be implemented here

+
+
+
+
+
+
diff --git a/apps/dashboard/src/app/pages/portfolio/portfolio.component.ts b/apps/dashboard/src/app/pages/portfolio/portfolio.component.ts index 8711236..bfc5485 100644 --- a/apps/dashboard/src/app/pages/portfolio/portfolio.component.ts +++ b/apps/dashboard/src/app/pages/portfolio/portfolio.component.ts @@ -1,159 +1,159 @@ -import { Component, signal, OnInit, OnDestroy, inject } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatCardModule } from '@angular/material/card'; -import { MatIconModule } from '@angular/material/icon'; -import { MatButtonModule } from '@angular/material/button'; -import { MatTableModule } from '@angular/material/table'; -import { MatProgressSpinnerModule } from '@angular/material/progress-spinner'; -import { MatSnackBarModule, MatSnackBar } from '@angular/material/snack-bar'; -import { MatTabsModule } from '@angular/material/tabs'; -import { ApiService } from '../../services/api.service'; -import { interval, Subscription } from 'rxjs'; - -export interface Position { - symbol: string; - quantity: number; - avgPrice: number; - currentPrice: number; - marketValue: number; - unrealizedPnL: number; - unrealizedPnLPercent: number; - dayChange: number; - dayChangePercent: number; -} - -export interface PortfolioSummary { - totalValue: number; - totalCost: number; - totalPnL: number; - totalPnLPercent: number; - dayChange: number; - dayChangePercent: number; - cash: number; - positionsCount: number; -} - -@Component({ - selector: 'app-portfolio', - standalone: true, - imports: [ - CommonModule, - MatCardModule, - MatIconModule, - MatButtonModule, - MatTableModule, - MatProgressSpinnerModule, - MatSnackBarModule, - MatTabsModule - ], - templateUrl: './portfolio.component.html', - styleUrl: './portfolio.component.css' -}) -export class PortfolioComponent implements OnInit, OnDestroy { - private apiService = inject(ApiService); - private snackBar = inject(MatSnackBar); - private subscriptions: Subscription[] = []; - - 
protected portfolioSummary = signal({ - totalValue: 0, - totalCost: 0, - totalPnL: 0, - totalPnLPercent: 0, - dayChange: 0, - dayChangePercent: 0, - cash: 0, - positionsCount: 0 - }); - - protected positions = signal([]); - protected isLoading = signal(true); - protected error = signal(null); - protected displayedColumns = ['symbol', 'quantity', 'avgPrice', 'currentPrice', 'marketValue', 'unrealizedPnL', 'dayChange']; - - ngOnInit() { - this.loadPortfolioData(); - - // Refresh portfolio data every 30 seconds - const portfolioSubscription = interval(30000).subscribe(() => { - this.loadPortfolioData(); - }); - this.subscriptions.push(portfolioSubscription); - } - - ngOnDestroy() { - this.subscriptions.forEach(sub => sub.unsubscribe()); - } - - private loadPortfolioData() { - // Since we don't have a portfolio endpoint yet, let's create mock data - // In a real implementation, this would call this.apiService.getPortfolio() - - setTimeout(() => { - const mockPositions: Position[] = [ - { - symbol: 'AAPL', - quantity: 100, - avgPrice: 180.50, - currentPrice: 192.53, - marketValue: 19253, - unrealizedPnL: 1203, - unrealizedPnLPercent: 6.67, - dayChange: 241, - dayChangePercent: 1.27 - }, - { - symbol: 'MSFT', - quantity: 50, - avgPrice: 400.00, - currentPrice: 415.26, - marketValue: 20763, - unrealizedPnL: 763, - unrealizedPnLPercent: 3.82, - dayChange: 436.50, - dayChangePercent: 2.15 - }, - { - symbol: 'GOOGL', - quantity: 10, - avgPrice: 2900.00, - currentPrice: 2847.56, - marketValue: 28475.60, - unrealizedPnL: -524.40, - unrealizedPnLPercent: -1.81, - dayChange: -123.40, - dayChangePercent: -0.43 - } - ]; - - const summary: PortfolioSummary = { - totalValue: mockPositions.reduce((sum, pos) => sum + pos.marketValue, 0) + 25000, // + cash - totalCost: mockPositions.reduce((sum, pos) => sum + (pos.avgPrice * pos.quantity), 0), - totalPnL: mockPositions.reduce((sum, pos) => sum + pos.unrealizedPnL, 0), - totalPnLPercent: 0, - dayChange: mockPositions.reduce((sum, pos) 
=> sum + pos.dayChange, 0), - dayChangePercent: 0, - cash: 25000, - positionsCount: mockPositions.length - }; - - summary.totalPnLPercent = (summary.totalPnL / summary.totalCost) * 100; - summary.dayChangePercent = (summary.dayChange / (summary.totalValue - summary.dayChange)) * 100; - - this.positions.set(mockPositions); - this.portfolioSummary.set(summary); - this.isLoading.set(false); - this.error.set(null); - }, 1000); - } - - refreshData() { - this.isLoading.set(true); - this.loadPortfolioData(); - } - - getPnLColor(value: number): string { - if (value > 0) return 'text-green-600'; - if (value < 0) return 'text-red-600'; - return 'text-gray-600'; - } -} +import { Component, signal, OnInit, OnDestroy, inject } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatCardModule } from '@angular/material/card'; +import { MatIconModule } from '@angular/material/icon'; +import { MatButtonModule } from '@angular/material/button'; +import { MatTableModule } from '@angular/material/table'; +import { MatProgressSpinnerModule } from '@angular/material/progress-spinner'; +import { MatSnackBarModule, MatSnackBar } from '@angular/material/snack-bar'; +import { MatTabsModule } from '@angular/material/tabs'; +import { ApiService } from '../../services/api.service'; +import { interval, Subscription } from 'rxjs'; + +export interface Position { + symbol: string; + quantity: number; + avgPrice: number; + currentPrice: number; + marketValue: number; + unrealizedPnL: number; + unrealizedPnLPercent: number; + dayChange: number; + dayChangePercent: number; +} + +export interface PortfolioSummary { + totalValue: number; + totalCost: number; + totalPnL: number; + totalPnLPercent: number; + dayChange: number; + dayChangePercent: number; + cash: number; + positionsCount: number; +} + +@Component({ + selector: 'app-portfolio', + standalone: true, + imports: [ + CommonModule, + MatCardModule, + MatIconModule, + MatButtonModule, + MatTableModule, + 
MatProgressSpinnerModule, + MatSnackBarModule, + MatTabsModule + ], + templateUrl: './portfolio.component.html', + styleUrl: './portfolio.component.css' +}) +export class PortfolioComponent implements OnInit, OnDestroy { + private apiService = inject(ApiService); + private snackBar = inject(MatSnackBar); + private subscriptions: Subscription[] = []; + + protected portfolioSummary = signal({ + totalValue: 0, + totalCost: 0, + totalPnL: 0, + totalPnLPercent: 0, + dayChange: 0, + dayChangePercent: 0, + cash: 0, + positionsCount: 0 + }); + + protected positions = signal([]); + protected isLoading = signal(true); + protected error = signal(null); + protected displayedColumns = ['symbol', 'quantity', 'avgPrice', 'currentPrice', 'marketValue', 'unrealizedPnL', 'dayChange']; + + ngOnInit() { + this.loadPortfolioData(); + + // Refresh portfolio data every 30 seconds + const portfolioSubscription = interval(30000).subscribe(() => { + this.loadPortfolioData(); + }); + this.subscriptions.push(portfolioSubscription); + } + + ngOnDestroy() { + this.subscriptions.forEach(sub => sub.unsubscribe()); + } + + private loadPortfolioData() { + // Since we don't have a portfolio endpoint yet, let's create mock data + // In a real implementation, this would call this.apiService.getPortfolio() + + setTimeout(() => { + const mockPositions: Position[] = [ + { + symbol: 'AAPL', + quantity: 100, + avgPrice: 180.50, + currentPrice: 192.53, + marketValue: 19253, + unrealizedPnL: 1203, + unrealizedPnLPercent: 6.67, + dayChange: 241, + dayChangePercent: 1.27 + }, + { + symbol: 'MSFT', + quantity: 50, + avgPrice: 400.00, + currentPrice: 415.26, + marketValue: 20763, + unrealizedPnL: 763, + unrealizedPnLPercent: 3.82, + dayChange: 436.50, + dayChangePercent: 2.15 + }, + { + symbol: 'GOOGL', + quantity: 10, + avgPrice: 2900.00, + currentPrice: 2847.56, + marketValue: 28475.60, + unrealizedPnL: -524.40, + unrealizedPnLPercent: -1.81, + dayChange: -123.40, + dayChangePercent: -0.43 + } + ]; + + const 
summary: PortfolioSummary = { + totalValue: mockPositions.reduce((sum, pos) => sum + pos.marketValue, 0) + 25000, // + cash + totalCost: mockPositions.reduce((sum, pos) => sum + (pos.avgPrice * pos.quantity), 0), + totalPnL: mockPositions.reduce((sum, pos) => sum + pos.unrealizedPnL, 0), + totalPnLPercent: 0, + dayChange: mockPositions.reduce((sum, pos) => sum + pos.dayChange, 0), + dayChangePercent: 0, + cash: 25000, + positionsCount: mockPositions.length + }; + + summary.totalPnLPercent = (summary.totalPnL / summary.totalCost) * 100; + summary.dayChangePercent = (summary.dayChange / (summary.totalValue - summary.dayChange)) * 100; + + this.positions.set(mockPositions); + this.portfolioSummary.set(summary); + this.isLoading.set(false); + this.error.set(null); + }, 1000); + } + + refreshData() { + this.isLoading.set(true); + this.loadPortfolioData(); + } + + getPnLColor(value: number): string { + if (value > 0) return 'text-green-600'; + if (value < 0) return 'text-red-600'; + return 'text-gray-600'; + } +} diff --git a/apps/dashboard/src/app/pages/risk-management/risk-management.component.css b/apps/dashboard/src/app/pages/risk-management/risk-management.component.css index e193853..7709883 100644 --- a/apps/dashboard/src/app/pages/risk-management/risk-management.component.css +++ b/apps/dashboard/src/app/pages/risk-management/risk-management.component.css @@ -1 +1 @@ -/* Risk Management specific styles */ +/* Risk Management specific styles */ diff --git a/apps/dashboard/src/app/pages/risk-management/risk-management.component.html b/apps/dashboard/src/app/pages/risk-management/risk-management.component.html index 59d88e0..2943e7b 100644 --- a/apps/dashboard/src/app/pages/risk-management/risk-management.component.html +++ b/apps/dashboard/src/app/pages/risk-management/risk-management.component.html @@ -1,178 +1,178 @@ -
- -
-
-

Risk Management

-

Monitor and control trading risks and exposure

-
- -
- - -
- @if (riskThresholds(); as thresholds) { - -
-
-

Max Position Size

-

${{ thresholds.maxPositionSize.toLocaleString() }}

-
- account_balance -
-
- - -
-
-

Max Daily Loss

-

${{ thresholds.maxDailyLoss.toLocaleString() }}

-
- trending_down -
-
- - -
-
-

Portfolio Risk Limit

-

{{ (thresholds.maxPortfolioRisk * 100).toFixed(1) }}%

-
- pie_chart -
-
- - -
-
-

Volatility Limit

-

{{ (thresholds.volatilityLimit * 100).toFixed(1) }}%

-
- show_chart -
-
- } -
- - - -
-

Risk Thresholds Configuration

-
- - @if (isLoading()) { -
- - Loading risk settings... -
- } @else if (error()) { -
- error -

{{ error() }}

- -
- } @else { -
-
- - Max Position Size ($) - - attach_money - - - - Max Daily Loss ($) - - trending_down - - - - Max Portfolio Risk (0-1) - - pie_chart - - - - Volatility Limit (0-1) - - show_chart - -
- -
- -
-
- } -
- - - -
-

Recent Risk Evaluations

-
- - @if (riskHistory().length === 0) { -
- history -

No risk evaluations found

-
- } @else { -
- - - - - - - - - - - - - - - - - - - - - - - - - - - -
Symbol{{ risk.symbol }}Position Value - ${{ risk.positionValue.toLocaleString() }} - Risk Level - - {{ risk.riskLevel }} - - Violations - @if (risk.violations.length > 0) { - {{ risk.violations.join(', ') }} - } @else { - None - } -
-
- } -
-
+
+ +
+
+

Risk Management

+

Monitor and control trading risks and exposure

+
+ +
+ + +
+ @if (riskThresholds(); as thresholds) { + +
+
+

Max Position Size

+

${{ thresholds.maxPositionSize.toLocaleString() }}

+
+ account_balance +
+
+ + +
+
+

Max Daily Loss

+

${{ thresholds.maxDailyLoss.toLocaleString() }}

+
+ trending_down +
+
+ + +
+
+

Portfolio Risk Limit

+

{{ (thresholds.maxPortfolioRisk * 100).toFixed(1) }}%

+
+ pie_chart +
+
+ + +
+
+

Volatility Limit

+

{{ (thresholds.volatilityLimit * 100).toFixed(1) }}%

+
+ show_chart +
+
+ } +
+ + + +
+

Risk Thresholds Configuration

+
+ + @if (isLoading()) { +
+ + Loading risk settings... +
+ } @else if (error()) { +
+ error +

{{ error() }}

+ +
+ } @else { +
+
+ + Max Position Size ($) + + attach_money + + + + Max Daily Loss ($) + + trending_down + + + + Max Portfolio Risk (0-1) + + pie_chart + + + + Volatility Limit (0-1) + + show_chart + +
+ +
+ +
+
+ } +
+ + + +
+

Recent Risk Evaluations

+
+ + @if (riskHistory().length === 0) { +
+ history +

No risk evaluations found

+
+ } @else { +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
Symbol{{ risk.symbol }}Position Value + ${{ risk.positionValue.toLocaleString() }} + Risk Level + + {{ risk.riskLevel }} + + Violations + @if (risk.violations.length > 0) { + {{ risk.violations.join(', ') }} + } @else { + None + } +
+
+ } +
+
diff --git a/apps/dashboard/src/app/pages/risk-management/risk-management.component.ts b/apps/dashboard/src/app/pages/risk-management/risk-management.component.ts index 041df3c..d293650 100644 --- a/apps/dashboard/src/app/pages/risk-management/risk-management.component.ts +++ b/apps/dashboard/src/app/pages/risk-management/risk-management.component.ts @@ -1,135 +1,135 @@ -import { Component, signal, OnInit, OnDestroy, inject } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatCardModule } from '@angular/material/card'; -import { MatIconModule } from '@angular/material/icon'; -import { MatButtonModule } from '@angular/material/button'; -import { MatTableModule } from '@angular/material/table'; -import { MatFormFieldModule } from '@angular/material/form-field'; -import { MatInputModule } from '@angular/material/input'; -import { MatSnackBarModule, MatSnackBar } from '@angular/material/snack-bar'; -import { MatProgressSpinnerModule } from '@angular/material/progress-spinner'; -import { ReactiveFormsModule, FormBuilder, FormGroup, Validators } from '@angular/forms'; -import { ApiService, RiskThresholds, RiskEvaluation } from '../../services/api.service'; -import { interval, Subscription } from 'rxjs'; - -@Component({ - selector: 'app-risk-management', - standalone: true, - imports: [ - CommonModule, - MatCardModule, - MatIconModule, - MatButtonModule, - MatTableModule, - MatFormFieldModule, - MatInputModule, - MatSnackBarModule, - MatProgressSpinnerModule, - ReactiveFormsModule - ], - templateUrl: './risk-management.component.html', - styleUrl: './risk-management.component.css' -}) -export class RiskManagementComponent implements OnInit, OnDestroy { - private apiService = inject(ApiService); - private snackBar = inject(MatSnackBar); - private fb = inject(FormBuilder); - private subscriptions: Subscription[] = []; - - protected riskThresholds = signal(null); - protected riskHistory = signal([]); - protected isLoading = signal(true); - 
protected isSaving = signal(false); - protected error = signal(null); - - protected thresholdsForm: FormGroup; - protected displayedColumns = ['symbol', 'positionValue', 'riskLevel', 'violations', 'timestamp']; - - constructor() { - this.thresholdsForm = this.fb.group({ - maxPositionSize: [0, [Validators.required, Validators.min(0)]], - maxDailyLoss: [0, [Validators.required, Validators.min(0)]], - maxPortfolioRisk: [0, [Validators.required, Validators.min(0), Validators.max(1)]], - volatilityLimit: [0, [Validators.required, Validators.min(0), Validators.max(1)]] - }); - } - - ngOnInit() { - this.loadRiskThresholds(); - this.loadRiskHistory(); - - // Refresh risk history every 30 seconds - const historySubscription = interval(30000).subscribe(() => { - this.loadRiskHistory(); - }); - this.subscriptions.push(historySubscription); - } - - ngOnDestroy() { - this.subscriptions.forEach(sub => sub.unsubscribe()); - } - - private loadRiskThresholds() { - this.apiService.getRiskThresholds().subscribe({ - next: (response) => { - this.riskThresholds.set(response.data); - this.thresholdsForm.patchValue(response.data); - this.isLoading.set(false); - this.error.set(null); - }, - error: (err) => { - console.error('Failed to load risk thresholds:', err); - this.error.set('Failed to load risk thresholds'); - this.isLoading.set(false); - this.snackBar.open('Failed to load risk thresholds', 'Dismiss', { duration: 5000 }); - } - }); - } - - private loadRiskHistory() { - this.apiService.getRiskHistory().subscribe({ - next: (response) => { - this.riskHistory.set(response.data); - }, - error: (err) => { - console.error('Failed to load risk history:', err); - this.snackBar.open('Failed to load risk history', 'Dismiss', { duration: 3000 }); - } - }); - } - - saveThresholds() { - if (this.thresholdsForm.valid) { - this.isSaving.set(true); - const thresholds = this.thresholdsForm.value as RiskThresholds; - - this.apiService.updateRiskThresholds(thresholds).subscribe({ - next: (response) => 
{ - this.riskThresholds.set(response.data); - this.isSaving.set(false); - this.snackBar.open('Risk thresholds updated successfully', 'Dismiss', { duration: 3000 }); - }, - error: (err) => { - console.error('Failed to save risk thresholds:', err); - this.isSaving.set(false); - this.snackBar.open('Failed to save risk thresholds', 'Dismiss', { duration: 5000 }); - } - }); - } - } - - refreshData() { - this.isLoading.set(true); - this.loadRiskThresholds(); - this.loadRiskHistory(); - } - - getRiskLevelColor(level: string): string { - switch (level) { - case 'LOW': return 'text-green-600'; - case 'MEDIUM': return 'text-yellow-600'; - case 'HIGH': return 'text-red-600'; - default: return 'text-gray-600'; - } - } -} +import { Component, signal, OnInit, OnDestroy, inject } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatCardModule } from '@angular/material/card'; +import { MatIconModule } from '@angular/material/icon'; +import { MatButtonModule } from '@angular/material/button'; +import { MatTableModule } from '@angular/material/table'; +import { MatFormFieldModule } from '@angular/material/form-field'; +import { MatInputModule } from '@angular/material/input'; +import { MatSnackBarModule, MatSnackBar } from '@angular/material/snack-bar'; +import { MatProgressSpinnerModule } from '@angular/material/progress-spinner'; +import { ReactiveFormsModule, FormBuilder, FormGroup, Validators } from '@angular/forms'; +import { ApiService, RiskThresholds, RiskEvaluation } from '../../services/api.service'; +import { interval, Subscription } from 'rxjs'; + +@Component({ + selector: 'app-risk-management', + standalone: true, + imports: [ + CommonModule, + MatCardModule, + MatIconModule, + MatButtonModule, + MatTableModule, + MatFormFieldModule, + MatInputModule, + MatSnackBarModule, + MatProgressSpinnerModule, + ReactiveFormsModule + ], + templateUrl: './risk-management.component.html', + styleUrl: './risk-management.component.css' +}) +export class 
RiskManagementComponent implements OnInit, OnDestroy { + private apiService = inject(ApiService); + private snackBar = inject(MatSnackBar); + private fb = inject(FormBuilder); + private subscriptions: Subscription[] = []; + + protected riskThresholds = signal(null); + protected riskHistory = signal([]); + protected isLoading = signal(true); + protected isSaving = signal(false); + protected error = signal(null); + + protected thresholdsForm: FormGroup; + protected displayedColumns = ['symbol', 'positionValue', 'riskLevel', 'violations', 'timestamp']; + + constructor() { + this.thresholdsForm = this.fb.group({ + maxPositionSize: [0, [Validators.required, Validators.min(0)]], + maxDailyLoss: [0, [Validators.required, Validators.min(0)]], + maxPortfolioRisk: [0, [Validators.required, Validators.min(0), Validators.max(1)]], + volatilityLimit: [0, [Validators.required, Validators.min(0), Validators.max(1)]] + }); + } + + ngOnInit() { + this.loadRiskThresholds(); + this.loadRiskHistory(); + + // Refresh risk history every 30 seconds + const historySubscription = interval(30000).subscribe(() => { + this.loadRiskHistory(); + }); + this.subscriptions.push(historySubscription); + } + + ngOnDestroy() { + this.subscriptions.forEach(sub => sub.unsubscribe()); + } + + private loadRiskThresholds() { + this.apiService.getRiskThresholds().subscribe({ + next: (response) => { + this.riskThresholds.set(response.data); + this.thresholdsForm.patchValue(response.data); + this.isLoading.set(false); + this.error.set(null); + }, + error: (err) => { + console.error('Failed to load risk thresholds:', err); + this.error.set('Failed to load risk thresholds'); + this.isLoading.set(false); + this.snackBar.open('Failed to load risk thresholds', 'Dismiss', { duration: 5000 }); + } + }); + } + + private loadRiskHistory() { + this.apiService.getRiskHistory().subscribe({ + next: (response) => { + this.riskHistory.set(response.data); + }, + error: (err) => { + console.error('Failed to load risk 
history:', err); + this.snackBar.open('Failed to load risk history', 'Dismiss', { duration: 3000 }); + } + }); + } + + saveThresholds() { + if (this.thresholdsForm.valid) { + this.isSaving.set(true); + const thresholds = this.thresholdsForm.value as RiskThresholds; + + this.apiService.updateRiskThresholds(thresholds).subscribe({ + next: (response) => { + this.riskThresholds.set(response.data); + this.isSaving.set(false); + this.snackBar.open('Risk thresholds updated successfully', 'Dismiss', { duration: 3000 }); + }, + error: (err) => { + console.error('Failed to save risk thresholds:', err); + this.isSaving.set(false); + this.snackBar.open('Failed to save risk thresholds', 'Dismiss', { duration: 5000 }); + } + }); + } + } + + refreshData() { + this.isLoading.set(true); + this.loadRiskThresholds(); + this.loadRiskHistory(); + } + + getRiskLevelColor(level: string): string { + switch (level) { + case 'LOW': return 'text-green-600'; + case 'MEDIUM': return 'text-yellow-600'; + case 'HIGH': return 'text-red-600'; + default: return 'text-gray-600'; + } + } +} diff --git a/apps/dashboard/src/app/pages/settings/settings.component.css b/apps/dashboard/src/app/pages/settings/settings.component.css index 894a168..4ef363c 100644 --- a/apps/dashboard/src/app/pages/settings/settings.component.css +++ b/apps/dashboard/src/app/pages/settings/settings.component.css @@ -1 +1 @@ -/* Settings specific styles */ +/* Settings specific styles */ diff --git a/apps/dashboard/src/app/pages/settings/settings.component.html b/apps/dashboard/src/app/pages/settings/settings.component.html index 5e4940b..63ca121 100644 --- a/apps/dashboard/src/app/pages/settings/settings.component.html +++ b/apps/dashboard/src/app/pages/settings/settings.component.html @@ -1,15 +1,15 @@ -
-
-
-

Settings

-

Configure application preferences and system settings

-
-
- - -
- settings -

Application settings and configuration will be implemented here

-
-
-
+
+
+
+

Settings

+

Configure application preferences and system settings

+
+
+ + +
+ settings +

Application settings and configuration will be implemented here

+
+
+
diff --git a/apps/dashboard/src/app/pages/settings/settings.component.ts b/apps/dashboard/src/app/pages/settings/settings.component.ts index 77a45d6..95fcfb9 100644 --- a/apps/dashboard/src/app/pages/settings/settings.component.ts +++ b/apps/dashboard/src/app/pages/settings/settings.component.ts @@ -1,13 +1,13 @@ -import { Component } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatCardModule } from '@angular/material/card'; -import { MatIconModule } from '@angular/material/icon'; - -@Component({ - selector: 'app-settings', - standalone: true, - imports: [CommonModule, MatCardModule, MatIconModule], - templateUrl: './settings.component.html', - styleUrl: './settings.component.css' -}) -export class SettingsComponent {} +import { Component } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatCardModule } from '@angular/material/card'; +import { MatIconModule } from '@angular/material/icon'; + +@Component({ + selector: 'app-settings', + standalone: true, + imports: [CommonModule, MatCardModule, MatIconModule], + templateUrl: './settings.component.html', + styleUrl: './settings.component.css' +}) +export class SettingsComponent {} diff --git a/apps/dashboard/src/app/pages/strategies/components/drawdown-chart.component.ts b/apps/dashboard/src/app/pages/strategies/components/drawdown-chart.component.ts index fe74209..e64514c 100644 --- a/apps/dashboard/src/app/pages/strategies/components/drawdown-chart.component.ts +++ b/apps/dashboard/src/app/pages/strategies/components/drawdown-chart.component.ts @@ -1,165 +1,165 @@ -import { Component, Input, OnChanges, SimpleChanges } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { BacktestResult } from '../../../services/strategy.service'; -import { Chart, ChartOptions } from 'chart.js/auto'; - -@Component({ - selector: 'app-drawdown-chart', - standalone: true, - imports: [CommonModule], - template: ` -
- -
- `, - styles: ` - .drawdown-chart-container { - width: 100%; - height: 300px; - margin-bottom: 20px; - } - ` -}) -export class DrawdownChartComponent implements OnChanges { - @Input() backtestResult?: BacktestResult; - - private chart?: Chart; - private chartElement?: HTMLCanvasElement; - - ngOnChanges(changes: SimpleChanges): void { - if (changes['backtestResult'] && this.backtestResult) { - this.renderChart(); - } - } - - ngAfterViewInit(): void { - this.chartElement = document.querySelector('canvas') as HTMLCanvasElement; - if (this.backtestResult) { - this.renderChart(); - } - } - - private renderChart(): void { - if (!this.chartElement || !this.backtestResult) return; - - // Clean up previous chart if it exists - if (this.chart) { - this.chart.destroy(); - } - - // Calculate drawdown series from daily returns - const drawdownData = this.calculateDrawdownSeries(this.backtestResult); - - // Create chart - this.chart = new Chart(this.chartElement, { - type: 'line', - data: { - labels: drawdownData.dates.map(date => this.formatDate(date)), - datasets: [ - { - label: 'Drawdown', - data: drawdownData.drawdowns, - borderColor: 'rgba(255, 99, 132, 1)', - backgroundColor: 'rgba(255, 99, 132, 0.2)', - fill: true, - tension: 0.3, - borderWidth: 2 - } - ] - }, - options: { - responsive: true, - maintainAspectRatio: false, - scales: { - x: { - ticks: { - maxTicksLimit: 12, - maxRotation: 0, - minRotation: 0 - }, - grid: { - display: false - } - }, - y: { - ticks: { - callback: function(value) { - return (value * 100).toFixed(1) + '%'; - } - }, - grid: { - color: 'rgba(200, 200, 200, 0.2)' - }, - min: -0.05, // Show at least 5% drawdown for context - suggestedMax: 0.01 - } - }, - plugins: { - tooltip: { - mode: 'index', - intersect: false, - callbacks: { - label: function(context) { - let label = context.dataset.label || ''; - if (label) { - label += ': '; - } - if (context.parsed.y !== null) { - label += (context.parsed.y * 100).toFixed(2) + '%'; - } - return label; - } - 
} - }, - legend: { - position: 'top', - } - } - } as ChartOptions - }); - } - - private calculateDrawdownSeries(result: BacktestResult): { - dates: Date[]; - drawdowns: number[]; - } { - const dates: Date[] = []; - const drawdowns: number[] = []; - - // Sort daily returns by date - const sortedReturns = [...result.dailyReturns].sort( - (a, b) => new Date(a.date).getTime() - new Date(b.date).getTime() - ); - - // Calculate equity curve - let equity = 1; - const equityCurve: number[] = []; - - for (const daily of sortedReturns) { - equity *= (1 + daily.return); - equityCurve.push(equity); - dates.push(new Date(daily.date)); - } - - // Calculate running maximum (high water mark) - let hwm = equityCurve[0]; - - for (let i = 0; i < equityCurve.length; i++) { - // Update high water mark - hwm = Math.max(hwm, equityCurve[i]); - // Calculate drawdown as percentage from high water mark - const drawdown = (equityCurve[i] / hwm) - 1; - drawdowns.push(drawdown); - } - - return { dates, drawdowns }; - } - - private formatDate(date: Date): string { - return new Date(date).toLocaleDateString('en-US', { - month: 'short', - day: 'numeric', - year: 'numeric' - }); - } -} +import { Component, Input, OnChanges, SimpleChanges } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { BacktestResult } from '../../../services/strategy.service'; +import { Chart, ChartOptions } from 'chart.js/auto'; + +@Component({ + selector: 'app-drawdown-chart', + standalone: true, + imports: [CommonModule], + template: ` +
+ +
+ `, + styles: ` + .drawdown-chart-container { + width: 100%; + height: 300px; + margin-bottom: 20px; + } + ` +}) +export class DrawdownChartComponent implements OnChanges { + @Input() backtestResult?: BacktestResult; + + private chart?: Chart; + private chartElement?: HTMLCanvasElement; + + ngOnChanges(changes: SimpleChanges): void { + if (changes['backtestResult'] && this.backtestResult) { + this.renderChart(); + } + } + + ngAfterViewInit(): void { + this.chartElement = document.querySelector('canvas') as HTMLCanvasElement; + if (this.backtestResult) { + this.renderChart(); + } + } + + private renderChart(): void { + if (!this.chartElement || !this.backtestResult) return; + + // Clean up previous chart if it exists + if (this.chart) { + this.chart.destroy(); + } + + // Calculate drawdown series from daily returns + const drawdownData = this.calculateDrawdownSeries(this.backtestResult); + + // Create chart + this.chart = new Chart(this.chartElement, { + type: 'line', + data: { + labels: drawdownData.dates.map(date => this.formatDate(date)), + datasets: [ + { + label: 'Drawdown', + data: drawdownData.drawdowns, + borderColor: 'rgba(255, 99, 132, 1)', + backgroundColor: 'rgba(255, 99, 132, 0.2)', + fill: true, + tension: 0.3, + borderWidth: 2 + } + ] + }, + options: { + responsive: true, + maintainAspectRatio: false, + scales: { + x: { + ticks: { + maxTicksLimit: 12, + maxRotation: 0, + minRotation: 0 + }, + grid: { + display: false + } + }, + y: { + ticks: { + callback: function(value) { + return (value * 100).toFixed(1) + '%'; + } + }, + grid: { + color: 'rgba(200, 200, 200, 0.2)' + }, + min: -0.05, // Show at least 5% drawdown for context + suggestedMax: 0.01 + } + }, + plugins: { + tooltip: { + mode: 'index', + intersect: false, + callbacks: { + label: function(context) { + let label = context.dataset.label || ''; + if (label) { + label += ': '; + } + if (context.parsed.y !== null) { + label += (context.parsed.y * 100).toFixed(2) + '%'; + } + return label; + } + 
} + }, + legend: { + position: 'top', + } + } + } as ChartOptions + }); + } + + private calculateDrawdownSeries(result: BacktestResult): { + dates: Date[]; + drawdowns: number[]; + } { + const dates: Date[] = []; + const drawdowns: number[] = []; + + // Sort daily returns by date + const sortedReturns = [...result.dailyReturns].sort( + (a, b) => new Date(a.date).getTime() - new Date(b.date).getTime() + ); + + // Calculate equity curve + let equity = 1; + const equityCurve: number[] = []; + + for (const daily of sortedReturns) { + equity *= (1 + daily.return); + equityCurve.push(equity); + dates.push(new Date(daily.date)); + } + + // Calculate running maximum (high water mark) + let hwm = equityCurve[0]; + + for (let i = 0; i < equityCurve.length; i++) { + // Update high water mark + hwm = Math.max(hwm, equityCurve[i]); + // Calculate drawdown as percentage from high water mark + const drawdown = (equityCurve[i] / hwm) - 1; + drawdowns.push(drawdown); + } + + return { dates, drawdowns }; + } + + private formatDate(date: Date): string { + return new Date(date).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric' + }); + } +} diff --git a/apps/dashboard/src/app/pages/strategies/components/equity-chart.component.ts b/apps/dashboard/src/app/pages/strategies/components/equity-chart.component.ts index cc24b73..38d5b14 100644 --- a/apps/dashboard/src/app/pages/strategies/components/equity-chart.component.ts +++ b/apps/dashboard/src/app/pages/strategies/components/equity-chart.component.ts @@ -1,171 +1,171 @@ -import { Component, Input, OnChanges, SimpleChanges } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { BacktestResult } from '../../../services/strategy.service'; -import { Chart, ChartOptions } from 'chart.js/auto'; - -@Component({ - selector: 'app-equity-chart', - standalone: true, - imports: [CommonModule], - template: ` -
- -
- `, - styles: ` - .equity-chart-container { - width: 100%; - height: 400px; - margin-bottom: 20px; - } - ` -}) -export class EquityChartComponent implements OnChanges { - @Input() backtestResult?: BacktestResult; - - private chart?: Chart; - private chartElement?: HTMLCanvasElement; - - ngOnChanges(changes: SimpleChanges): void { - if (changes['backtestResult'] && this.backtestResult) { - this.renderChart(); - } - } - - ngAfterViewInit(): void { - this.chartElement = document.querySelector('canvas') as HTMLCanvasElement; - if (this.backtestResult) { - this.renderChart(); - } - } - - private renderChart(): void { - if (!this.chartElement || !this.backtestResult) return; - - // Clean up previous chart if it exists - if (this.chart) { - this.chart.destroy(); - } - - // Prepare data - const equityCurve = this.calculateEquityCurve(this.backtestResult); - - // Create chart - this.chart = new Chart(this.chartElement, { - type: 'line', - data: { - labels: equityCurve.dates.map(date => this.formatDate(date)), - datasets: [ - { - label: 'Portfolio Value', - data: equityCurve.values, - borderColor: 'rgba(75, 192, 192, 1)', - backgroundColor: 'rgba(75, 192, 192, 0.2)', - tension: 0.3, - borderWidth: 2, - fill: true - }, - { - label: 'Benchmark', - data: equityCurve.benchmark, - borderColor: 'rgba(153, 102, 255, 0.5)', - backgroundColor: 'rgba(153, 102, 255, 0.1)', - borderDash: [5, 5], - tension: 0.3, - borderWidth: 1, - fill: false - } - ] - }, - options: { - responsive: true, - maintainAspectRatio: false, - scales: { - x: { - ticks: { - maxTicksLimit: 12, - maxRotation: 0, - minRotation: 0 - }, - grid: { - display: false - } - }, - y: { - ticks: { - callback: function(value) { - return '$' + value.toLocaleString(); - } - }, - grid: { - color: 'rgba(200, 200, 200, 0.2)' - } - } - }, - plugins: { - tooltip: { - mode: 'index', - intersect: false, - callbacks: { - label: function(context) { - let label = context.dataset.label || ''; - if (label) { - label += ': '; - } - if 
(context.parsed.y !== null) { - label += new Intl.NumberFormat('en-US', { style: 'currency', currency: 'USD' }) - .format(context.parsed.y); - } - return label; - } - } - }, - legend: { - position: 'top', - } - } - } as ChartOptions - }); - } - - private calculateEquityCurve(result: BacktestResult): { - dates: Date[]; - values: number[]; - benchmark: number[]; - } { - const initialValue = result.initialCapital; - const dates: Date[] = []; - const values: number[] = []; - const benchmark: number[] = []; - - // Sort daily returns by date - const sortedReturns = [...result.dailyReturns].sort( - (a, b) => new Date(a.date).getTime() - new Date(b.date).getTime() - ); - - // Calculate cumulative portfolio values - let portfolioValue = initialValue; - let benchmarkValue = initialValue; - - for (const daily of sortedReturns) { - const date = new Date(daily.date); - portfolioValue = portfolioValue * (1 + daily.return); - // Simple benchmark (e.g., assuming 8% annualized return for a market index) - benchmarkValue = benchmarkValue * (1 + 0.08 / 365); - - dates.push(date); - values.push(portfolioValue); - benchmark.push(benchmarkValue); - } - - return { dates, values, benchmark }; - } - - private formatDate(date: Date): string { - return new Date(date).toLocaleDateString('en-US', { - month: 'short', - day: 'numeric', - year: 'numeric' - }); - } -} +import { Component, Input, OnChanges, SimpleChanges } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { BacktestResult } from '../../../services/strategy.service'; +import { Chart, ChartOptions } from 'chart.js/auto'; + +@Component({ + selector: 'app-equity-chart', + standalone: true, + imports: [CommonModule], + template: ` +
+ +
+ `, + styles: ` + .equity-chart-container { + width: 100%; + height: 400px; + margin-bottom: 20px; + } + ` +}) +export class EquityChartComponent implements OnChanges { + @Input() backtestResult?: BacktestResult; + + private chart?: Chart; + private chartElement?: HTMLCanvasElement; + + ngOnChanges(changes: SimpleChanges): void { + if (changes['backtestResult'] && this.backtestResult) { + this.renderChart(); + } + } + + ngAfterViewInit(): void { + this.chartElement = document.querySelector('canvas') as HTMLCanvasElement; + if (this.backtestResult) { + this.renderChart(); + } + } + + private renderChart(): void { + if (!this.chartElement || !this.backtestResult) return; + + // Clean up previous chart if it exists + if (this.chart) { + this.chart.destroy(); + } + + // Prepare data + const equityCurve = this.calculateEquityCurve(this.backtestResult); + + // Create chart + this.chart = new Chart(this.chartElement, { + type: 'line', + data: { + labels: equityCurve.dates.map(date => this.formatDate(date)), + datasets: [ + { + label: 'Portfolio Value', + data: equityCurve.values, + borderColor: 'rgba(75, 192, 192, 1)', + backgroundColor: 'rgba(75, 192, 192, 0.2)', + tension: 0.3, + borderWidth: 2, + fill: true + }, + { + label: 'Benchmark', + data: equityCurve.benchmark, + borderColor: 'rgba(153, 102, 255, 0.5)', + backgroundColor: 'rgba(153, 102, 255, 0.1)', + borderDash: [5, 5], + tension: 0.3, + borderWidth: 1, + fill: false + } + ] + }, + options: { + responsive: true, + maintainAspectRatio: false, + scales: { + x: { + ticks: { + maxTicksLimit: 12, + maxRotation: 0, + minRotation: 0 + }, + grid: { + display: false + } + }, + y: { + ticks: { + callback: function(value) { + return '$' + value.toLocaleString(); + } + }, + grid: { + color: 'rgba(200, 200, 200, 0.2)' + } + } + }, + plugins: { + tooltip: { + mode: 'index', + intersect: false, + callbacks: { + label: function(context) { + let label = context.dataset.label || ''; + if (label) { + label += ': '; + } + if 
(context.parsed.y !== null) { + label += new Intl.NumberFormat('en-US', { style: 'currency', currency: 'USD' }) + .format(context.parsed.y); + } + return label; + } + } + }, + legend: { + position: 'top', + } + } + } as ChartOptions + }); + } + + private calculateEquityCurve(result: BacktestResult): { + dates: Date[]; + values: number[]; + benchmark: number[]; + } { + const initialValue = result.initialCapital; + const dates: Date[] = []; + const values: number[] = []; + const benchmark: number[] = []; + + // Sort daily returns by date + const sortedReturns = [...result.dailyReturns].sort( + (a, b) => new Date(a.date).getTime() - new Date(b.date).getTime() + ); + + // Calculate cumulative portfolio values + let portfolioValue = initialValue; + let benchmarkValue = initialValue; + + for (const daily of sortedReturns) { + const date = new Date(daily.date); + portfolioValue = portfolioValue * (1 + daily.return); + // Simple benchmark (e.g., assuming 8% annualized return for a market index) + benchmarkValue = benchmarkValue * (1 + 0.08 / 365); + + dates.push(date); + values.push(portfolioValue); + benchmark.push(benchmarkValue); + } + + return { dates, values, benchmark }; + } + + private formatDate(date: Date): string { + return new Date(date).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric' + }); + } +} diff --git a/apps/dashboard/src/app/pages/strategies/components/performance-metrics.component.ts b/apps/dashboard/src/app/pages/strategies/components/performance-metrics.component.ts index 0c7072d..89c0730 100644 --- a/apps/dashboard/src/app/pages/strategies/components/performance-metrics.component.ts +++ b/apps/dashboard/src/app/pages/strategies/components/performance-metrics.component.ts @@ -1,258 +1,258 @@ -import { Component, Input } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatCardModule } from '@angular/material/card'; -import { MatGridListModule } from '@angular/material/grid-list'; 
-import { MatDividerModule } from '@angular/material/divider'; -import { MatTooltipModule } from '@angular/material/tooltip'; -import { BacktestResult } from '../../../services/strategy.service'; - -@Component({ - selector: 'app-performance-metrics', - standalone: true, - imports: [ - CommonModule, - MatCardModule, - MatGridListModule, - MatDividerModule, - MatTooltipModule - ], - template: ` - - - Performance Metrics - - -
-
-

Returns

-
-
-
Total Return
-
- {{formatPercent(backtestResult?.totalReturn || 0)}} -
-
-
-
Annualized Return
-
- {{formatPercent(backtestResult?.annualizedReturn || 0)}} -
-
-
-
CAGR
-
- {{formatPercent(backtestResult?.cagr || 0)}} -
-
-
-
- - - -
-

Risk Metrics

-
-
-
Max Drawdown
-
- {{formatPercent(backtestResult?.maxDrawdown || 0)}} -
-
-
-
Max DD Duration
-
- {{formatDays(backtestResult?.maxDrawdownDuration || 0)}} -
-
-
-
Volatility
-
- {{formatPercent(backtestResult?.volatility || 0)}} -
-
-
-
Ulcer Index
-
- {{(backtestResult?.ulcerIndex || 0).toFixed(4)}} -
-
-
-
- - - -
-

Risk-Adjusted Returns

-
-
-
Sharpe Ratio
-
- {{(backtestResult?.sharpeRatio || 0).toFixed(2)}} -
-
-
-
Sortino Ratio
-
- {{(backtestResult?.sortinoRatio || 0).toFixed(2)}} -
-
-
-
Calmar Ratio
-
- {{(backtestResult?.calmarRatio || 0).toFixed(2)}} -
-
-
-
Omega Ratio
-
- {{(backtestResult?.omegaRatio || 0).toFixed(2)}} -
-
-
-
- - - -
-

Trade Statistics

-
-
-
Total Trades
-
- {{backtestResult?.totalTrades || 0}} -
-
-
-
Win Rate
-
- {{formatPercent(backtestResult?.winRate || 0)}} -
-
-
-
Avg Win
-
- {{formatPercent(backtestResult?.averageWinningTrade || 0)}} -
-
-
-
Avg Loss
-
- {{formatPercent(backtestResult?.averageLosingTrade || 0)}} -
-
-
-
Profit Factor
-
- {{(backtestResult?.profitFactor || 0).toFixed(2)}} -
-
-
-
-
-
-
- `, - styles: ` - .metrics-card { - margin-bottom: 20px; - } - - .metrics-grid { - display: flex; - flex-direction: column; - gap: 16px; - } - - .metric-group { - padding: 10px 0; - } - - .metric-group h3 { - margin-top: 0; - margin-bottom: 16px; - font-size: 16px; - font-weight: 500; - color: #555; - } - - .metrics-row { - display: flex; - flex-wrap: wrap; - gap: 24px; - } - - .metric { - min-width: 120px; - margin-bottom: 16px; - } - - .metric-name { - font-size: 12px; - color: #666; - margin-bottom: 4px; - } - - .metric-value { - font-size: 16px; - font-weight: 500; - } - - .positive { - color: #4CAF50; - } - - .negative { - color: #F44336; - } - - .neutral { - color: #FFA000; - } - - mat-divider { - margin: 8px 0; - } - ` -}) -export class PerformanceMetricsComponent { - @Input() backtestResult?: BacktestResult; - - // Formatting helpers - formatPercent(value: number): string { - return new Intl.NumberFormat('en-US', { - style: 'percent', - minimumFractionDigits: 2, - maximumFractionDigits: 2 - }).format(value); - } - - formatDays(days: number): string { - return `${days} days`; - } - - // Conditional classes - getReturnClass(value: number): string { - if (value > 0) return 'positive'; - if (value < 0) return 'negative'; - return ''; - } - - getRatioClass(value: number): string { - if (value >= 1.5) return 'positive'; - if (value >= 1) return 'neutral'; - if (value < 0) return 'negative'; - return ''; - } - - getWinRateClass(value: number): string { - if (value >= 0.55) return 'positive'; - if (value >= 0.45) return 'neutral'; - return 'negative'; - } - - getProfitFactorClass(value: number): string { - if (value >= 1.5) return 'positive'; - if (value >= 1) return 'neutral'; - return 'negative'; - } -} +import { Component, Input } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatCardModule } from '@angular/material/card'; +import { MatGridListModule } from '@angular/material/grid-list'; +import { MatDividerModule } from 
'@angular/material/divider'; +import { MatTooltipModule } from '@angular/material/tooltip'; +import { BacktestResult } from '../../../services/strategy.service'; + +@Component({ + selector: 'app-performance-metrics', + standalone: true, + imports: [ + CommonModule, + MatCardModule, + MatGridListModule, + MatDividerModule, + MatTooltipModule + ], + template: ` + + + Performance Metrics + + +
+
+

Returns

+
+
+
Total Return
+
+ {{formatPercent(backtestResult?.totalReturn || 0)}} +
+
+
+
Annualized Return
+
+ {{formatPercent(backtestResult?.annualizedReturn || 0)}} +
+
+
+
CAGR
+
+ {{formatPercent(backtestResult?.cagr || 0)}} +
+
+
+
+ + + +
+

Risk Metrics

+
+
+
Max Drawdown
+
+ {{formatPercent(backtestResult?.maxDrawdown || 0)}} +
+
+
+
Max DD Duration
+
+ {{formatDays(backtestResult?.maxDrawdownDuration || 0)}} +
+
+
+
Volatility
+
+ {{formatPercent(backtestResult?.volatility || 0)}} +
+
+
+
Ulcer Index
+
+ {{(backtestResult?.ulcerIndex || 0).toFixed(4)}} +
+
+
+
+ + + +
+

Risk-Adjusted Returns

+
+
+
Sharpe Ratio
+
+ {{(backtestResult?.sharpeRatio || 0).toFixed(2)}} +
+
+
+
Sortino Ratio
+
+ {{(backtestResult?.sortinoRatio || 0).toFixed(2)}} +
+
+
+
Calmar Ratio
+
+ {{(backtestResult?.calmarRatio || 0).toFixed(2)}} +
+
+
+
Omega Ratio
+
+ {{(backtestResult?.omegaRatio || 0).toFixed(2)}} +
+
+
+
+ + + +
+

Trade Statistics

+
+
+
Total Trades
+
+ {{backtestResult?.totalTrades || 0}} +
+
+
+
Win Rate
+
+ {{formatPercent(backtestResult?.winRate || 0)}} +
+
+
+
Avg Win
+
+ {{formatPercent(backtestResult?.averageWinningTrade || 0)}} +
+
+
+
Avg Loss
+
+ {{formatPercent(backtestResult?.averageLosingTrade || 0)}} +
+
+
+
Profit Factor
+
+ {{(backtestResult?.profitFactor || 0).toFixed(2)}} +
+
+
+
+
+
+
+ `, + styles: ` + .metrics-card { + margin-bottom: 20px; + } + + .metrics-grid { + display: flex; + flex-direction: column; + gap: 16px; + } + + .metric-group { + padding: 10px 0; + } + + .metric-group h3 { + margin-top: 0; + margin-bottom: 16px; + font-size: 16px; + font-weight: 500; + color: #555; + } + + .metrics-row { + display: flex; + flex-wrap: wrap; + gap: 24px; + } + + .metric { + min-width: 120px; + margin-bottom: 16px; + } + + .metric-name { + font-size: 12px; + color: #666; + margin-bottom: 4px; + } + + .metric-value { + font-size: 16px; + font-weight: 500; + } + + .positive { + color: #4CAF50; + } + + .negative { + color: #F44336; + } + + .neutral { + color: #FFA000; + } + + mat-divider { + margin: 8px 0; + } + ` +}) +export class PerformanceMetricsComponent { + @Input() backtestResult?: BacktestResult; + + // Formatting helpers + formatPercent(value: number): string { + return new Intl.NumberFormat('en-US', { + style: 'percent', + minimumFractionDigits: 2, + maximumFractionDigits: 2 + }).format(value); + } + + formatDays(days: number): string { + return `${days} days`; + } + + // Conditional classes + getReturnClass(value: number): string { + if (value > 0) return 'positive'; + if (value < 0) return 'negative'; + return ''; + } + + getRatioClass(value: number): string { + if (value >= 1.5) return 'positive'; + if (value >= 1) return 'neutral'; + if (value < 0) return 'negative'; + return ''; + } + + getWinRateClass(value: number): string { + if (value >= 0.55) return 'positive'; + if (value >= 0.45) return 'neutral'; + return 'negative'; + } + + getProfitFactorClass(value: number): string { + if (value >= 1.5) return 'positive'; + if (value >= 1) return 'neutral'; + return 'negative'; + } +} diff --git a/apps/dashboard/src/app/pages/strategies/components/trades-table.component.ts b/apps/dashboard/src/app/pages/strategies/components/trades-table.component.ts index ceb62c7..3722aef 100644 --- 
a/apps/dashboard/src/app/pages/strategies/components/trades-table.component.ts +++ b/apps/dashboard/src/app/pages/strategies/components/trades-table.component.ts @@ -1,221 +1,221 @@ -import { Component, Input } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatTableModule } from '@angular/material/table'; -import { MatSortModule, Sort } from '@angular/material/sort'; -import { MatPaginatorModule, PageEvent } from '@angular/material/paginator'; -import { MatCardModule } from '@angular/material/card'; -import { MatIconModule } from '@angular/material/icon'; -import { BacktestResult } from '../../../services/strategy.service'; - -@Component({ - selector: 'app-trades-table', - standalone: true, - imports: [ - CommonModule, - MatTableModule, - MatSortModule, - MatPaginatorModule, - MatCardModule, - MatIconModule - ], - template: ` - - - Trades - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Symbol {{trade.symbol}} Entry Time {{formatDate(trade.entryTime)}} Entry Price {{formatCurrency(trade.entryPrice)}} Exit Time {{formatDate(trade.exitTime)}} Exit Price {{formatCurrency(trade.exitPrice)}} Quantity {{trade.quantity}} P&L - {{formatCurrency(trade.pnl)}} - P&L % - {{formatPercent(trade.pnlPercent)}} -
- - - -
-
- `, - styles: ` - .trades-card { - margin-bottom: 20px; - } - - .trades-table { - width: 100%; - border-collapse: collapse; - } - - .mat-column-pnl, .mat-column-pnlPercent { - text-align: right; - font-weight: 500; - } - - .positive { - color: #4CAF50; - } - - .negative { - color: #F44336; - } - - .mat-mdc-row:hover { - background-color: rgba(0, 0, 0, 0.04); - } - ` -}) -export class TradesTableComponent { - @Input() set backtestResult(value: BacktestResult | undefined) { - if (value) { - this._backtestResult = value; - this.updateDisplayedTrades(); - } - } - - get backtestResult(): BacktestResult | undefined { - return this._backtestResult; - } - - private _backtestResult?: BacktestResult; - - // Table configuration - displayedColumns: string[] = [ - 'symbol', 'entryTime', 'entryPrice', 'exitTime', - 'exitPrice', 'quantity', 'pnl', 'pnlPercent' - ]; - - // Pagination - pageSize = 10; - currentPage = 0; - displayedTrades: any[] = []; - - get totalTrades(): number { - return this._backtestResult?.trades.length || 0; - } - - // Sort the trades - sortData(sort: Sort): void { - if (!sort.active || sort.direction === '') { - this.updateDisplayedTrades(); - return; - } - - const data = this._backtestResult?.trades.slice() || []; - - this.displayedTrades = data.sort((a, b) => { - const isAsc = sort.direction === 'asc'; - switch (sort.active) { - case 'symbol': return this.compare(a.symbol, b.symbol, isAsc); - case 'entryTime': return this.compare(new Date(a.entryTime).getTime(), new Date(b.entryTime).getTime(), isAsc); - case 'entryPrice': return this.compare(a.entryPrice, b.entryPrice, isAsc); - case 'exitTime': return this.compare(new Date(a.exitTime).getTime(), new Date(b.exitTime).getTime(), isAsc); - case 'exitPrice': return this.compare(a.exitPrice, b.exitPrice, isAsc); - case 'quantity': return this.compare(a.quantity, b.quantity, isAsc); - case 'pnl': return this.compare(a.pnl, b.pnl, isAsc); - case 'pnlPercent': return this.compare(a.pnlPercent, b.pnlPercent, 
isAsc); - default: return 0; - } - }).slice(this.currentPage * this.pageSize, (this.currentPage + 1) * this.pageSize); - } - - // Handle page changes - pageChange(event: PageEvent): void { - this.pageSize = event.pageSize; - this.currentPage = event.pageIndex; - this.updateDisplayedTrades(); - } - - // Update displayed trades based on current page and page size - updateDisplayedTrades(): void { - if (this._backtestResult) { - this.displayedTrades = this._backtestResult.trades.slice( - this.currentPage * this.pageSize, - (this.currentPage + 1) * this.pageSize - ); - } else { - this.displayedTrades = []; - } - } - - // Helper methods for formatting - formatDate(date: Date | string): string { - return new Date(date).toLocaleString(); - } - - formatCurrency(value: number): string { - return new Intl.NumberFormat('en-US', { - style: 'currency', - currency: 'USD', - }).format(value); - } - - formatPercent(value: number): string { - return new Intl.NumberFormat('en-US', { - style: 'percent', - minimumFractionDigits: 2, - maximumFractionDigits: 2 - }).format(value); - } - - private compare(a: number | string, b: number | string, isAsc: boolean): number { - return (a < b ? -1 : 1) * (isAsc ? 
1 : -1); - } -} +import { Component, Input } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatTableModule } from '@angular/material/table'; +import { MatSortModule, Sort } from '@angular/material/sort'; +import { MatPaginatorModule, PageEvent } from '@angular/material/paginator'; +import { MatCardModule } from '@angular/material/card'; +import { MatIconModule } from '@angular/material/icon'; +import { BacktestResult } from '../../../services/strategy.service'; + +@Component({ + selector: 'app-trades-table', + standalone: true, + imports: [ + CommonModule, + MatTableModule, + MatSortModule, + MatPaginatorModule, + MatCardModule, + MatIconModule + ], + template: ` + + + Trades + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Symbol {{trade.symbol}} Entry Time {{formatDate(trade.entryTime)}} Entry Price {{formatCurrency(trade.entryPrice)}} Exit Time {{formatDate(trade.exitTime)}} Exit Price {{formatCurrency(trade.exitPrice)}} Quantity {{trade.quantity}} P&L + {{formatCurrency(trade.pnl)}} + P&L % + {{formatPercent(trade.pnlPercent)}} +
+ + + +
+
+ `, + styles: ` + .trades-card { + margin-bottom: 20px; + } + + .trades-table { + width: 100%; + border-collapse: collapse; + } + + .mat-column-pnl, .mat-column-pnlPercent { + text-align: right; + font-weight: 500; + } + + .positive { + color: #4CAF50; + } + + .negative { + color: #F44336; + } + + .mat-mdc-row:hover { + background-color: rgba(0, 0, 0, 0.04); + } + ` +}) +export class TradesTableComponent { + @Input() set backtestResult(value: BacktestResult | undefined) { + if (value) { + this._backtestResult = value; + this.updateDisplayedTrades(); + } + } + + get backtestResult(): BacktestResult | undefined { + return this._backtestResult; + } + + private _backtestResult?: BacktestResult; + + // Table configuration + displayedColumns: string[] = [ + 'symbol', 'entryTime', 'entryPrice', 'exitTime', + 'exitPrice', 'quantity', 'pnl', 'pnlPercent' + ]; + + // Pagination + pageSize = 10; + currentPage = 0; + displayedTrades: any[] = []; + + get totalTrades(): number { + return this._backtestResult?.trades.length || 0; + } + + // Sort the trades + sortData(sort: Sort): void { + if (!sort.active || sort.direction === '') { + this.updateDisplayedTrades(); + return; + } + + const data = this._backtestResult?.trades.slice() || []; + + this.displayedTrades = data.sort((a, b) => { + const isAsc = sort.direction === 'asc'; + switch (sort.active) { + case 'symbol': return this.compare(a.symbol, b.symbol, isAsc); + case 'entryTime': return this.compare(new Date(a.entryTime).getTime(), new Date(b.entryTime).getTime(), isAsc); + case 'entryPrice': return this.compare(a.entryPrice, b.entryPrice, isAsc); + case 'exitTime': return this.compare(new Date(a.exitTime).getTime(), new Date(b.exitTime).getTime(), isAsc); + case 'exitPrice': return this.compare(a.exitPrice, b.exitPrice, isAsc); + case 'quantity': return this.compare(a.quantity, b.quantity, isAsc); + case 'pnl': return this.compare(a.pnl, b.pnl, isAsc); + case 'pnlPercent': return this.compare(a.pnlPercent, b.pnlPercent, 
isAsc); + default: return 0; + } + }).slice(this.currentPage * this.pageSize, (this.currentPage + 1) * this.pageSize); + } + + // Handle page changes + pageChange(event: PageEvent): void { + this.pageSize = event.pageSize; + this.currentPage = event.pageIndex; + this.updateDisplayedTrades(); + } + + // Update displayed trades based on current page and page size + updateDisplayedTrades(): void { + if (this._backtestResult) { + this.displayedTrades = this._backtestResult.trades.slice( + this.currentPage * this.pageSize, + (this.currentPage + 1) * this.pageSize + ); + } else { + this.displayedTrades = []; + } + } + + // Helper methods for formatting + formatDate(date: Date | string): string { + return new Date(date).toLocaleString(); + } + + formatCurrency(value: number): string { + return new Intl.NumberFormat('en-US', { + style: 'currency', + currency: 'USD', + }).format(value); + } + + formatPercent(value: number): string { + return new Intl.NumberFormat('en-US', { + style: 'percent', + minimumFractionDigits: 2, + maximumFractionDigits: 2 + }).format(value); + } + + private compare(a: number | string, b: number | string, isAsc: boolean): number { + return (a < b ? -1 : 1) * (isAsc ? 
1 : -1); + } +} diff --git a/apps/dashboard/src/app/pages/strategies/dialogs/backtest-dialog.component.ts b/apps/dashboard/src/app/pages/strategies/dialogs/backtest-dialog.component.ts index ad73d3b..2679c28 100644 --- a/apps/dashboard/src/app/pages/strategies/dialogs/backtest-dialog.component.ts +++ b/apps/dashboard/src/app/pages/strategies/dialogs/backtest-dialog.component.ts @@ -1,185 +1,185 @@ -import { Component, Inject, OnInit } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { - FormBuilder, - FormGroup, - ReactiveFormsModule, - Validators -} from '@angular/forms'; -import { MatButtonModule } from '@angular/material/button'; -import { MatDialogModule, MAT_DIALOG_DATA, MatDialogRef } from '@angular/material/dialog'; -import { MatFormFieldModule } from '@angular/material/form-field'; -import { MatInputModule } from '@angular/material/input'; -import { MatSelectModule } from '@angular/material/select'; -import { MatDatepickerModule } from '@angular/material/datepicker'; -import { MatNativeDateModule } from '@angular/material/core'; -import { MatProgressBarModule } from '@angular/material/progress-bar'; -import { MatTabsModule } from '@angular/material/tabs'; -import { MatChipsModule } from '@angular/material/chips'; -import { MatIconModule } from '@angular/material/icon'; -import { MatSlideToggleModule } from '@angular/material/slide-toggle'; -import { - BacktestRequest, - BacktestResult, - StrategyService, - TradingStrategy -} from '../../../services/strategy.service'; - -@Component({ - selector: 'app-backtest-dialog', - standalone: true, - imports: [ - CommonModule, - ReactiveFormsModule, - MatButtonModule, - MatDialogModule, - MatFormFieldModule, - MatInputModule, - MatSelectModule, - MatDatepickerModule, - MatNativeDateModule, - MatProgressBarModule, - MatTabsModule, - MatChipsModule, - MatIconModule, - MatSlideToggleModule - ], - templateUrl: './backtest-dialog.component.html', - styleUrl: './backtest-dialog.component.css' 
-}) -export class BacktestDialogComponent implements OnInit { - backtestForm: FormGroup; - strategyTypes: string[] = []; - availableSymbols: string[] = ['AAPL', 'MSFT', 'GOOGL', 'AMZN', 'TSLA', 'META', 'NVDA', 'SPY', 'QQQ']; - selectedSymbols: string[] = []; - parameters: Record = {}; - isRunning: boolean = false; - backtestResult: BacktestResult | null = null; - - constructor( - private fb: FormBuilder, - private strategyService: StrategyService, - @Inject(MAT_DIALOG_DATA) public data: TradingStrategy | null, - private dialogRef: MatDialogRef - ) { - // Initialize form with defaults - this.backtestForm = this.fb.group({ - strategyType: ['', [Validators.required]], - startDate: [new Date(new Date().setFullYear(new Date().getFullYear() - 1)), [Validators.required]], - endDate: [new Date(), [Validators.required]], - initialCapital: [100000, [Validators.required, Validators.min(1000)]], - dataResolution: ['1d', [Validators.required]], - commission: [0.001, [Validators.required, Validators.min(0), Validators.max(0.1)]], - slippage: [0.0005, [Validators.required, Validators.min(0), Validators.max(0.1)]], - mode: ['event', [Validators.required]] - }); - - // If strategy is provided, pre-populate the form - if (data) { - this.selectedSymbols = [...data.symbols]; - this.backtestForm.patchValue({ - strategyType: data.type - }); - this.parameters = {...data.parameters}; - } - } - - ngOnInit(): void { - this.loadStrategyTypes(); - } - - loadStrategyTypes(): void { - this.strategyService.getStrategyTypes().subscribe({ - next: (response) => { - if (response.success) { - this.strategyTypes = response.data; - - // If strategy is provided, load its parameters - if (this.data) { - this.onStrategyTypeChange(this.data.type); - } - } - }, - error: (error) => { - console.error('Error loading strategy types:', error); - this.strategyTypes = ['MOVING_AVERAGE_CROSSOVER', 'MEAN_REVERSION', 'CUSTOM']; - } - }); - } - - onStrategyTypeChange(type: string): void { - // Get default parameters 
for this strategy type - this.strategyService.getStrategyParameters(type).subscribe({ - next: (response) => { - if (response.success) { - // If strategy is provided, merge default with existing - if (this.data) { - this.parameters = { - ...response.data, - ...this.data.parameters - }; - } else { - this.parameters = response.data; - } - } - }, - error: (error) => { - console.error('Error loading parameters:', error); - this.parameters = {}; - } - }); - } - - addSymbol(symbol: string): void { - if (!symbol || this.selectedSymbols.includes(symbol)) return; - this.selectedSymbols.push(symbol); - } - - removeSymbol(symbol: string): void { - this.selectedSymbols = this.selectedSymbols.filter(s => s !== symbol); - } - - updateParameter(key: string, value: any): void { - this.parameters[key] = value; - } - - onSubmit(): void { - if (this.backtestForm.invalid || this.selectedSymbols.length === 0) { - return; - } - - const formValue = this.backtestForm.value; - - const backtestRequest: BacktestRequest = { - strategyType: formValue.strategyType, - strategyParams: this.parameters, - symbols: this.selectedSymbols, - startDate: formValue.startDate, - endDate: formValue.endDate, - initialCapital: formValue.initialCapital, - dataResolution: formValue.dataResolution, - commission: formValue.commission, - slippage: formValue.slippage, - mode: formValue.mode - }; - - this.isRunning = true; - - this.strategyService.runBacktest(backtestRequest).subscribe({ - next: (response) => { - this.isRunning = false; - if (response.success) { - this.backtestResult = response.data; - } - }, - error: (error) => { - this.isRunning = false; - console.error('Backtest error:', error); - } - }); - } - - close(): void { - this.dialogRef.close(this.backtestResult); - } -} +import { Component, Inject, OnInit } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { + FormBuilder, + FormGroup, + ReactiveFormsModule, + Validators +} from '@angular/forms'; +import { MatButtonModule } 
from '@angular/material/button'; +import { MatDialogModule, MAT_DIALOG_DATA, MatDialogRef } from '@angular/material/dialog'; +import { MatFormFieldModule } from '@angular/material/form-field'; +import { MatInputModule } from '@angular/material/input'; +import { MatSelectModule } from '@angular/material/select'; +import { MatDatepickerModule } from '@angular/material/datepicker'; +import { MatNativeDateModule } from '@angular/material/core'; +import { MatProgressBarModule } from '@angular/material/progress-bar'; +import { MatTabsModule } from '@angular/material/tabs'; +import { MatChipsModule } from '@angular/material/chips'; +import { MatIconModule } from '@angular/material/icon'; +import { MatSlideToggleModule } from '@angular/material/slide-toggle'; +import { + BacktestRequest, + BacktestResult, + StrategyService, + TradingStrategy +} from '../../../services/strategy.service'; + +@Component({ + selector: 'app-backtest-dialog', + standalone: true, + imports: [ + CommonModule, + ReactiveFormsModule, + MatButtonModule, + MatDialogModule, + MatFormFieldModule, + MatInputModule, + MatSelectModule, + MatDatepickerModule, + MatNativeDateModule, + MatProgressBarModule, + MatTabsModule, + MatChipsModule, + MatIconModule, + MatSlideToggleModule + ], + templateUrl: './backtest-dialog.component.html', + styleUrl: './backtest-dialog.component.css' +}) +export class BacktestDialogComponent implements OnInit { + backtestForm: FormGroup; + strategyTypes: string[] = []; + availableSymbols: string[] = ['AAPL', 'MSFT', 'GOOGL', 'AMZN', 'TSLA', 'META', 'NVDA', 'SPY', 'QQQ']; + selectedSymbols: string[] = []; + parameters: Record = {}; + isRunning: boolean = false; + backtestResult: BacktestResult | null = null; + + constructor( + private fb: FormBuilder, + private strategyService: StrategyService, + @Inject(MAT_DIALOG_DATA) public data: TradingStrategy | null, + private dialogRef: MatDialogRef + ) { + // Initialize form with defaults + this.backtestForm = this.fb.group({ + 
strategyType: ['', [Validators.required]], + startDate: [new Date(new Date().setFullYear(new Date().getFullYear() - 1)), [Validators.required]], + endDate: [new Date(), [Validators.required]], + initialCapital: [100000, [Validators.required, Validators.min(1000)]], + dataResolution: ['1d', [Validators.required]], + commission: [0.001, [Validators.required, Validators.min(0), Validators.max(0.1)]], + slippage: [0.0005, [Validators.required, Validators.min(0), Validators.max(0.1)]], + mode: ['event', [Validators.required]] + }); + + // If strategy is provided, pre-populate the form + if (data) { + this.selectedSymbols = [...data.symbols]; + this.backtestForm.patchValue({ + strategyType: data.type + }); + this.parameters = {...data.parameters}; + } + } + + ngOnInit(): void { + this.loadStrategyTypes(); + } + + loadStrategyTypes(): void { + this.strategyService.getStrategyTypes().subscribe({ + next: (response) => { + if (response.success) { + this.strategyTypes = response.data; + + // If strategy is provided, load its parameters + if (this.data) { + this.onStrategyTypeChange(this.data.type); + } + } + }, + error: (error) => { + console.error('Error loading strategy types:', error); + this.strategyTypes = ['MOVING_AVERAGE_CROSSOVER', 'MEAN_REVERSION', 'CUSTOM']; + } + }); + } + + onStrategyTypeChange(type: string): void { + // Get default parameters for this strategy type + this.strategyService.getStrategyParameters(type).subscribe({ + next: (response) => { + if (response.success) { + // If strategy is provided, merge default with existing + if (this.data) { + this.parameters = { + ...response.data, + ...this.data.parameters + }; + } else { + this.parameters = response.data; + } + } + }, + error: (error) => { + console.error('Error loading parameters:', error); + this.parameters = {}; + } + }); + } + + addSymbol(symbol: string): void { + if (!symbol || this.selectedSymbols.includes(symbol)) return; + this.selectedSymbols.push(symbol); + } + + removeSymbol(symbol: 
string): void { + this.selectedSymbols = this.selectedSymbols.filter(s => s !== symbol); + } + + updateParameter(key: string, value: any): void { + this.parameters[key] = value; + } + + onSubmit(): void { + if (this.backtestForm.invalid || this.selectedSymbols.length === 0) { + return; + } + + const formValue = this.backtestForm.value; + + const backtestRequest: BacktestRequest = { + strategyType: formValue.strategyType, + strategyParams: this.parameters, + symbols: this.selectedSymbols, + startDate: formValue.startDate, + endDate: formValue.endDate, + initialCapital: formValue.initialCapital, + dataResolution: formValue.dataResolution, + commission: formValue.commission, + slippage: formValue.slippage, + mode: formValue.mode + }; + + this.isRunning = true; + + this.strategyService.runBacktest(backtestRequest).subscribe({ + next: (response) => { + this.isRunning = false; + if (response.success) { + this.backtestResult = response.data; + } + }, + error: (error) => { + this.isRunning = false; + console.error('Backtest error:', error); + } + }); + } + + close(): void { + this.dialogRef.close(this.backtestResult); + } +} diff --git a/apps/dashboard/src/app/pages/strategies/dialogs/strategy-dialog.component.html b/apps/dashboard/src/app/pages/strategies/dialogs/strategy-dialog.component.html index 695db5a..157087c 100644 --- a/apps/dashboard/src/app/pages/strategies/dialogs/strategy-dialog.component.html +++ b/apps/dashboard/src/app/pages/strategies/dialogs/strategy-dialog.component.html @@ -1,84 +1,84 @@ -

{{isEditMode ? 'Edit Strategy' : 'Create Strategy'}}

- -
- -
- - - Strategy Name - - Name is required - - - - Description - - - - - Strategy Type - - - {{type}} - - - Strategy type is required - - - -
- -
- - {{symbol}} - cancel - -
- -
- - Add Symbol - - - -
- -
-

Suggested symbols:

-
- -
-
-
- - -
-

Strategy Parameters

-
- - {{param.key}} - - -
-
-
-
- - - - - -
+

{{isEditMode ? 'Edit Strategy' : 'Create Strategy'}}

+ +
+ +
+ + + Strategy Name + + Name is required + + + + Description + + + + + Strategy Type + + + {{type}} + + + Strategy type is required + + + +
+ +
+ + {{symbol}} + cancel + +
+ +
+ + Add Symbol + + + +
+ +
+

Suggested symbols:

+
+ +
+
+
+ + +
+

Strategy Parameters

+
+ + {{param.key}} + + +
+
+
+
+ + + + + +
diff --git a/apps/dashboard/src/app/pages/strategies/dialogs/strategy-dialog.component.ts b/apps/dashboard/src/app/pages/strategies/dialogs/strategy-dialog.component.ts index c9090fc..8c5a52e 100644 --- a/apps/dashboard/src/app/pages/strategies/dialogs/strategy-dialog.component.ts +++ b/apps/dashboard/src/app/pages/strategies/dialogs/strategy-dialog.component.ts @@ -1,178 +1,178 @@ -import { Component, Inject, OnInit } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { - FormBuilder, - FormGroup, - ReactiveFormsModule, - Validators -} from '@angular/forms'; -import { MatButtonModule } from '@angular/material/button'; -import { MatDialogModule, MAT_DIALOG_DATA, MatDialogRef } from '@angular/material/dialog'; -import { MatFormFieldModule } from '@angular/material/form-field'; -import { MatInputModule } from '@angular/material/input'; -import { MatSelectModule } from '@angular/material/select'; -import { MatChipsModule } from '@angular/material/chips'; -import { MatIconModule } from '@angular/material/icon'; -import { COMMA, ENTER } from '@angular/cdk/keycodes'; -import { MatAutocompleteModule } from '@angular/material/autocomplete'; -import { - StrategyService, - TradingStrategy -} from '../../../services/strategy.service'; - -@Component({ - selector: 'app-strategy-dialog', - standalone: true, - imports: [ - CommonModule, - ReactiveFormsModule, - MatButtonModule, - MatDialogModule, - MatFormFieldModule, - MatInputModule, - MatSelectModule, - MatChipsModule, - MatIconModule, - MatAutocompleteModule - ], - templateUrl: './strategy-dialog.component.html', - styleUrl: './strategy-dialog.component.css' -}) -export class StrategyDialogComponent implements OnInit { - strategyForm: FormGroup; - isEditMode: boolean = false; - strategyTypes: string[] = []; - availableSymbols: string[] = ['AAPL', 'MSFT', 'GOOGL', 'AMZN', 'TSLA', 'META', 'NVDA', 'SPY', 'QQQ']; - selectedSymbols: string[] = []; - separatorKeysCodes: number[] = [ENTER, COMMA]; - 
parameters: Record = {}; - - constructor( - private fb: FormBuilder, - private strategyService: StrategyService, - @Inject(MAT_DIALOG_DATA) public data: TradingStrategy | null, - private dialogRef: MatDialogRef - ) { - this.isEditMode = !!data; - - this.strategyForm = this.fb.group({ - name: ['', [Validators.required]], - description: [''], - type: ['', [Validators.required]], - // Dynamic parameters will be added based on strategy type - }); - - if (this.isEditMode && data) { - this.selectedSymbols = [...data.symbols]; - this.strategyForm.patchValue({ - name: data.name, - description: data.description, - type: data.type - }); - this.parameters = {...data.parameters}; - } - } - - ngOnInit(): void { - // In a real implementation, fetch available strategy types from the API - this.loadStrategyTypes(); - } - - loadStrategyTypes(): void { - // In a real implementation, this would call the API - this.strategyService.getStrategyTypes().subscribe({ - next: (response) => { - if (response.success) { - this.strategyTypes = response.data; - - // If editing, load parameters - if (this.isEditMode && this.data) { - this.onStrategyTypeChange(this.data.type); - } - } - }, - error: (error) => { - console.error('Error loading strategy types:', error); - // Fallback to hardcoded types - this.strategyTypes = ['MOVING_AVERAGE_CROSSOVER', 'MEAN_REVERSION', 'CUSTOM']; - } - }); - } - - onStrategyTypeChange(type: string): void { - // Get default parameters for this strategy type - this.strategyService.getStrategyParameters(type).subscribe({ - next: (response) => { - if (response.success) { - // If editing, merge default with existing - if (this.isEditMode && this.data) { - this.parameters = { - ...response.data, - ...this.data.parameters - }; - } else { - this.parameters = response.data; - } - } - }, - error: (error) => { - console.error('Error loading parameters:', error); - // Fallback to empty parameters - this.parameters = {}; - } - }); - } - - addSymbol(symbol: string): void { - if 
(!symbol || this.selectedSymbols.includes(symbol)) return; - this.selectedSymbols.push(symbol); - } - - removeSymbol(symbol: string): void { - this.selectedSymbols = this.selectedSymbols.filter(s => s !== symbol); - } - - onSubmit(): void { - if (this.strategyForm.invalid || this.selectedSymbols.length === 0) { - return; - } - - const formValue = this.strategyForm.value; - - const strategy: Partial = { - name: formValue.name, - description: formValue.description, - type: formValue.type, - symbols: this.selectedSymbols, - parameters: this.parameters, - }; - - if (this.isEditMode && this.data) { - this.strategyService.updateStrategy(this.data.id, strategy).subscribe({ - next: (response) => { - if (response.success) { - this.dialogRef.close(true); - } - }, - error: (error) => { - console.error('Error updating strategy:', error); - } - }); - } else { - this.strategyService.createStrategy(strategy).subscribe({ - next: (response) => { - if (response.success) { - this.dialogRef.close(true); - } - }, - error: (error) => { - console.error('Error creating strategy:', error); - } - }); - } - } - - updateParameter(key: string, value: any): void { - this.parameters[key] = value; - } -} +import { Component, Inject, OnInit } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { + FormBuilder, + FormGroup, + ReactiveFormsModule, + Validators +} from '@angular/forms'; +import { MatButtonModule } from '@angular/material/button'; +import { MatDialogModule, MAT_DIALOG_DATA, MatDialogRef } from '@angular/material/dialog'; +import { MatFormFieldModule } from '@angular/material/form-field'; +import { MatInputModule } from '@angular/material/input'; +import { MatSelectModule } from '@angular/material/select'; +import { MatChipsModule } from '@angular/material/chips'; +import { MatIconModule } from '@angular/material/icon'; +import { COMMA, ENTER } from '@angular/cdk/keycodes'; +import { MatAutocompleteModule } from '@angular/material/autocomplete'; +import { + 
StrategyService, + TradingStrategy +} from '../../../services/strategy.service'; + +@Component({ + selector: 'app-strategy-dialog', + standalone: true, + imports: [ + CommonModule, + ReactiveFormsModule, + MatButtonModule, + MatDialogModule, + MatFormFieldModule, + MatInputModule, + MatSelectModule, + MatChipsModule, + MatIconModule, + MatAutocompleteModule + ], + templateUrl: './strategy-dialog.component.html', + styleUrl: './strategy-dialog.component.css' +}) +export class StrategyDialogComponent implements OnInit { + strategyForm: FormGroup; + isEditMode: boolean = false; + strategyTypes: string[] = []; + availableSymbols: string[] = ['AAPL', 'MSFT', 'GOOGL', 'AMZN', 'TSLA', 'META', 'NVDA', 'SPY', 'QQQ']; + selectedSymbols: string[] = []; + separatorKeysCodes: number[] = [ENTER, COMMA]; + parameters: Record = {}; + + constructor( + private fb: FormBuilder, + private strategyService: StrategyService, + @Inject(MAT_DIALOG_DATA) public data: TradingStrategy | null, + private dialogRef: MatDialogRef + ) { + this.isEditMode = !!data; + + this.strategyForm = this.fb.group({ + name: ['', [Validators.required]], + description: [''], + type: ['', [Validators.required]], + // Dynamic parameters will be added based on strategy type + }); + + if (this.isEditMode && data) { + this.selectedSymbols = [...data.symbols]; + this.strategyForm.patchValue({ + name: data.name, + description: data.description, + type: data.type + }); + this.parameters = {...data.parameters}; + } + } + + ngOnInit(): void { + // In a real implementation, fetch available strategy types from the API + this.loadStrategyTypes(); + } + + loadStrategyTypes(): void { + // In a real implementation, this would call the API + this.strategyService.getStrategyTypes().subscribe({ + next: (response) => { + if (response.success) { + this.strategyTypes = response.data; + + // If editing, load parameters + if (this.isEditMode && this.data) { + this.onStrategyTypeChange(this.data.type); + } + } + }, + error: (error) => { 
+ console.error('Error loading strategy types:', error); + // Fallback to hardcoded types + this.strategyTypes = ['MOVING_AVERAGE_CROSSOVER', 'MEAN_REVERSION', 'CUSTOM']; + } + }); + } + + onStrategyTypeChange(type: string): void { + // Get default parameters for this strategy type + this.strategyService.getStrategyParameters(type).subscribe({ + next: (response) => { + if (response.success) { + // If editing, merge default with existing + if (this.isEditMode && this.data) { + this.parameters = { + ...response.data, + ...this.data.parameters + }; + } else { + this.parameters = response.data; + } + } + }, + error: (error) => { + console.error('Error loading parameters:', error); + // Fallback to empty parameters + this.parameters = {}; + } + }); + } + + addSymbol(symbol: string): void { + if (!symbol || this.selectedSymbols.includes(symbol)) return; + this.selectedSymbols.push(symbol); + } + + removeSymbol(symbol: string): void { + this.selectedSymbols = this.selectedSymbols.filter(s => s !== symbol); + } + + onSubmit(): void { + if (this.strategyForm.invalid || this.selectedSymbols.length === 0) { + return; + } + + const formValue = this.strategyForm.value; + + const strategy: Partial = { + name: formValue.name, + description: formValue.description, + type: formValue.type, + symbols: this.selectedSymbols, + parameters: this.parameters, + }; + + if (this.isEditMode && this.data) { + this.strategyService.updateStrategy(this.data.id, strategy).subscribe({ + next: (response) => { + if (response.success) { + this.dialogRef.close(true); + } + }, + error: (error) => { + console.error('Error updating strategy:', error); + } + }); + } else { + this.strategyService.createStrategy(strategy).subscribe({ + next: (response) => { + if (response.success) { + this.dialogRef.close(true); + } + }, + error: (error) => { + console.error('Error creating strategy:', error); + } + }); + } + } + + updateParameter(key: string, value: any): void { + this.parameters[key] = value; + } +} diff 
--git a/apps/dashboard/src/app/pages/strategies/strategies.component.css b/apps/dashboard/src/app/pages/strategies/strategies.component.css index a924fef..3de2dd5 100644 --- a/apps/dashboard/src/app/pages/strategies/strategies.component.css +++ b/apps/dashboard/src/app/pages/strategies/strategies.component.css @@ -1 +1 @@ -/* Strategies specific styles */ +/* Strategies specific styles */ diff --git a/apps/dashboard/src/app/pages/strategies/strategies.component.html b/apps/dashboard/src/app/pages/strategies/strategies.component.html index 8d3c48e..30d948c 100644 --- a/apps/dashboard/src/app/pages/strategies/strategies.component.html +++ b/apps/dashboard/src/app/pages/strategies/strategies.component.html @@ -1,142 +1,142 @@ -
-
-
-

Trading Strategies

-

Configure and monitor your automated trading strategies

-
-
- - -
-
- - - - - -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Strategy -
{{strategy.name}}
-
{{strategy.description}}
-
Type{{strategy.type}}Symbols -
- - {{symbol}} - - - +{{strategy.symbols.length - 3}} more - -
-
Status -
- - {{strategy.status}} -
-
Performance -
-
- Return: - - {{strategy.performance.totalReturn | percent:'1.2-2'}} - -
-
- Win Rate: - {{strategy.performance.winRate | percent:'1.0-0'}} -
-
-
Actions -
- - - - - - - -
-
-
- - - -
- psychology -

No Strategies Yet

-

Create your first trading strategy to get started

- -
-
-
-
- - -
- -
- -
-
+
+
+
+

Trading Strategies

+

Configure and monitor your automated trading strategies

+
+
+ + +
+
+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Strategy +
{{strategy.name}}
+
{{strategy.description}}
+
Type{{strategy.type}}Symbols +
+ + {{symbol}} + + + +{{strategy.symbols.length - 3}} more + +
+
Status +
+ + {{strategy.status}} +
+
Performance +
+
+ Return: + + {{strategy.performance.totalReturn | percent:'1.2-2'}} + +
+
+ Win Rate: + {{strategy.performance.winRate | percent:'1.0-0'}} +
+
+
Actions +
+ + + + + + + +
+
+
+ + + +
+ psychology +

No Strategies Yet

+

Create your first trading strategy to get started

+ +
+
+
+
+ + +
+ +
+ +
+
diff --git a/apps/dashboard/src/app/pages/strategies/strategies.component.ts b/apps/dashboard/src/app/pages/strategies/strategies.component.ts index ee1939b..8d4efa4 100644 --- a/apps/dashboard/src/app/pages/strategies/strategies.component.ts +++ b/apps/dashboard/src/app/pages/strategies/strategies.component.ts @@ -1,148 +1,148 @@ -import { Component, OnInit } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatCardModule } from '@angular/material/card'; -import { MatIconModule } from '@angular/material/icon'; -import { MatButtonModule } from '@angular/material/button'; -import { MatTabsModule } from '@angular/material/tabs'; -import { MatTableModule } from '@angular/material/table'; -import { MatSortModule } from '@angular/material/sort'; -import { MatPaginatorModule } from '@angular/material/paginator'; -import { MatDialogModule, MatDialog } from '@angular/material/dialog'; -import { MatMenuModule } from '@angular/material/menu'; -import { MatChipsModule } from '@angular/material/chips'; -import { MatProgressBarModule } from '@angular/material/progress-bar'; -import { FormsModule, ReactiveFormsModule } from '@angular/forms'; -import { StrategyService, TradingStrategy } from '../../services/strategy.service'; -import { WebSocketService } from '../../services/websocket.service'; -import { StrategyDialogComponent } from './dialogs/strategy-dialog.component'; -import { BacktestDialogComponent } from './dialogs/backtest-dialog.component'; -import { StrategyDetailsComponent } from './strategy-details/strategy-details.component'; - -@Component({ - selector: 'app-strategies', - standalone: true, - imports: [ - CommonModule, - MatCardModule, - MatIconModule, - MatButtonModule, - MatTabsModule, - MatTableModule, - MatSortModule, - MatPaginatorModule, - MatDialogModule, - MatMenuModule, - MatChipsModule, - MatProgressBarModule, - FormsModule, - ReactiveFormsModule, - StrategyDetailsComponent - ], - templateUrl: './strategies.component.html', 
- styleUrl: './strategies.component.css' -}) -export class StrategiesComponent implements OnInit { - strategies: TradingStrategy[] = []; - displayedColumns: string[] = ['name', 'type', 'symbols', 'status', 'performance', 'actions']; - selectedStrategy: TradingStrategy | null = null; - isLoading = false; - - constructor( - private strategyService: StrategyService, - private webSocketService: WebSocketService, - private dialog: MatDialog - ) {} - - ngOnInit(): void { - this.loadStrategies(); - this.listenForStrategyUpdates(); - } - - loadStrategies(): void { - this.isLoading = true; - this.strategyService.getStrategies().subscribe({ - next: (response) => { - if (response.success) { - this.strategies = response.data; - } - this.isLoading = false; - }, - error: (error) => { - console.error('Error loading strategies:', error); - this.isLoading = false; - } - }); - } - - listenForStrategyUpdates(): void { - this.webSocketService.messages.subscribe(message => { - if (message.type === 'STRATEGY_CREATED' || - message.type === 'STRATEGY_UPDATED' || - message.type === 'STRATEGY_STATUS_CHANGED') { - // Refresh the strategy list when changes occur - this.loadStrategies(); - } - }); - } - - getStatusColor(status: string): string { - switch (status) { - case 'ACTIVE': return 'green'; - case 'PAUSED': return 'orange'; - case 'ERROR': return 'red'; - default: return 'gray'; - } - } - - openStrategyDialog(strategy?: TradingStrategy): void { - const dialogRef = this.dialog.open(StrategyDialogComponent, { - width: '600px', - data: strategy || null - }); - - dialogRef.afterClosed().subscribe(result => { - if (result) { - this.loadStrategies(); - } - }); - } - - openBacktestDialog(strategy?: TradingStrategy): void { - const dialogRef = this.dialog.open(BacktestDialogComponent, { - width: '800px', - data: strategy || null - }); - - dialogRef.afterClosed().subscribe(result => { - if (result) { - // Handle backtest result if needed - } - }); - } - - toggleStrategyStatus(strategy: 
TradingStrategy): void { - this.isLoading = true; - - if (strategy.status === 'ACTIVE') { - this.strategyService.pauseStrategy(strategy.id).subscribe({ - next: () => this.loadStrategies(), - error: (error) => { - console.error('Error pausing strategy:', error); - this.isLoading = false; - } - }); - } else { - this.strategyService.startStrategy(strategy.id).subscribe({ - next: () => this.loadStrategies(), - error: (error) => { - console.error('Error starting strategy:', error); - this.isLoading = false; - } - }); - } - } - - viewStrategyDetails(strategy: TradingStrategy): void { - this.selectedStrategy = strategy; - } -} +import { Component, OnInit } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatCardModule } from '@angular/material/card'; +import { MatIconModule } from '@angular/material/icon'; +import { MatButtonModule } from '@angular/material/button'; +import { MatTabsModule } from '@angular/material/tabs'; +import { MatTableModule } from '@angular/material/table'; +import { MatSortModule } from '@angular/material/sort'; +import { MatPaginatorModule } from '@angular/material/paginator'; +import { MatDialogModule, MatDialog } from '@angular/material/dialog'; +import { MatMenuModule } from '@angular/material/menu'; +import { MatChipsModule } from '@angular/material/chips'; +import { MatProgressBarModule } from '@angular/material/progress-bar'; +import { FormsModule, ReactiveFormsModule } from '@angular/forms'; +import { StrategyService, TradingStrategy } from '../../services/strategy.service'; +import { WebSocketService } from '../../services/websocket.service'; +import { StrategyDialogComponent } from './dialogs/strategy-dialog.component'; +import { BacktestDialogComponent } from './dialogs/backtest-dialog.component'; +import { StrategyDetailsComponent } from './strategy-details/strategy-details.component'; + +@Component({ + selector: 'app-strategies', + standalone: true, + imports: [ + CommonModule, + MatCardModule, + 
MatIconModule, + MatButtonModule, + MatTabsModule, + MatTableModule, + MatSortModule, + MatPaginatorModule, + MatDialogModule, + MatMenuModule, + MatChipsModule, + MatProgressBarModule, + FormsModule, + ReactiveFormsModule, + StrategyDetailsComponent + ], + templateUrl: './strategies.component.html', + styleUrl: './strategies.component.css' +}) +export class StrategiesComponent implements OnInit { + strategies: TradingStrategy[] = []; + displayedColumns: string[] = ['name', 'type', 'symbols', 'status', 'performance', 'actions']; + selectedStrategy: TradingStrategy | null = null; + isLoading = false; + + constructor( + private strategyService: StrategyService, + private webSocketService: WebSocketService, + private dialog: MatDialog + ) {} + + ngOnInit(): void { + this.loadStrategies(); + this.listenForStrategyUpdates(); + } + + loadStrategies(): void { + this.isLoading = true; + this.strategyService.getStrategies().subscribe({ + next: (response) => { + if (response.success) { + this.strategies = response.data; + } + this.isLoading = false; + }, + error: (error) => { + console.error('Error loading strategies:', error); + this.isLoading = false; + } + }); + } + + listenForStrategyUpdates(): void { + this.webSocketService.messages.subscribe(message => { + if (message.type === 'STRATEGY_CREATED' || + message.type === 'STRATEGY_UPDATED' || + message.type === 'STRATEGY_STATUS_CHANGED') { + // Refresh the strategy list when changes occur + this.loadStrategies(); + } + }); + } + + getStatusColor(status: string): string { + switch (status) { + case 'ACTIVE': return 'green'; + case 'PAUSED': return 'orange'; + case 'ERROR': return 'red'; + default: return 'gray'; + } + } + + openStrategyDialog(strategy?: TradingStrategy): void { + const dialogRef = this.dialog.open(StrategyDialogComponent, { + width: '600px', + data: strategy || null + }); + + dialogRef.afterClosed().subscribe(result => { + if (result) { + this.loadStrategies(); + } + }); + } + + 
openBacktestDialog(strategy?: TradingStrategy): void { + const dialogRef = this.dialog.open(BacktestDialogComponent, { + width: '800px', + data: strategy || null + }); + + dialogRef.afterClosed().subscribe(result => { + if (result) { + // Handle backtest result if needed + } + }); + } + + toggleStrategyStatus(strategy: TradingStrategy): void { + this.isLoading = true; + + if (strategy.status === 'ACTIVE') { + this.strategyService.pauseStrategy(strategy.id).subscribe({ + next: () => this.loadStrategies(), + error: (error) => { + console.error('Error pausing strategy:', error); + this.isLoading = false; + } + }); + } else { + this.strategyService.startStrategy(strategy.id).subscribe({ + next: () => this.loadStrategies(), + error: (error) => { + console.error('Error starting strategy:', error); + this.isLoading = false; + } + }); + } + } + + viewStrategyDetails(strategy: TradingStrategy): void { + this.selectedStrategy = strategy; + } +} diff --git a/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.css b/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.css index cc465c9..3ea29ae 100644 --- a/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.css +++ b/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.css @@ -1,16 +1,16 @@ -/* Strategy details specific styles */ -table { - width: 100%; - border-collapse: collapse; -} - -th { - font-weight: 600; - color: #4b5563; - font-size: 0.875rem; - border-bottom: 1px solid #e5e7eb; -} - -td { - border-bottom: 1px solid #e5e7eb; -} +/* Strategy details specific styles */ +table { + width: 100%; + border-collapse: collapse; +} + +th { + font-weight: 600; + color: #4b5563; + font-size: 0.875rem; + border-bottom: 1px solid #e5e7eb; +} + +td { + border-bottom: 1px solid #e5e7eb; +} diff --git a/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.html 
b/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.html index 3ff8fc8..ebf57ea 100644 --- a/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.html +++ b/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.html @@ -1,214 +1,214 @@ -
-
- - -
-
-

{{strategy.name}}

-

{{strategy.description}}

-
-
- - - {{strategy.status}} - -
-
- -
-
-

Type

-

{{strategy.type}}

-
-
-

Created

-

{{strategy.createdAt | date:'medium'}}

-
-
-

Last Updated

-

{{strategy.updatedAt | date:'medium'}}

-
-
-

Symbols

-
- {{symbol}} -
-
-
-
- - - -

Performance

-
-
-

Return

-

- {{performance.totalReturn | percent:'1.2-2'}} -

-
-
-

Win Rate

-

{{performance.winRate | percent:'1.0-0'}}

-
-
-

Sharpe Ratio

-

{{performance.sharpeRatio | number:'1.2-2'}}

-
-
-

Max Drawdown

-

{{performance.maxDrawdown | percent:'1.2-2'}}

-
-
-

Total Trades

-

{{performance.totalTrades}}

-
-
-

Sortino Ratio

-

{{performance.sortinoRatio | number:'1.2-2'}}

-
-
- - - -
- - - - -
-
-
- - - -

Strategy Parameters

-
-
-

{{param.key}}

-

{{param.value}}

-
-
-
- -
-

Backtest Results

- - - - - - - - - - - - -
- - - - - - -
- - - - - - - - - - - - - - - - - - - - - - -
TimeSymbolActionPriceQuantityConfidence
{{signal.timestamp | date:'short'}}{{signal.symbol}} - - {{signal.action}} - - ${{signal.price | number:'1.2-2'}}{{signal.quantity}}{{signal.confidence | percent:'1.0-0'}}
-
- - - -
-
- - - -
- - - - - - - - - - - - - - - - - - - - - - -
SymbolEntryExitQuantityP&LP&L %
{{trade.symbol}} - ${{trade.entryPrice | number:'1.2-2'}} @ {{trade.entryTime | date:'short'}} - - ${{trade.exitPrice | number:'1.2-2'}} @ {{trade.exitTime | date:'short'}} - {{trade.quantity}} - ${{trade.pnl | number:'1.2-2'}} - - {{trade.pnlPercent | number:'1.2-2'}}% -
-
- - - -
-
-
-
-
- - -
- psychology -

No strategy selected

-
-
+
+
+ + +
+
+

{{strategy.name}}

+

{{strategy.description}}

+
+
+ + + {{strategy.status}} + +
+
+ +
+
+

Type

+

{{strategy.type}}

+
+
+

Created

+

{{strategy.createdAt | date:'medium'}}

+
+
+

Last Updated

+

{{strategy.updatedAt | date:'medium'}}

+
+
+

Symbols

+
+ {{symbol}} +
+
+
+
+ + + +

Performance

+
+
+

Return

+

+ {{performance.totalReturn | percent:'1.2-2'}} +

+
+
+

Win Rate

+

{{performance.winRate | percent:'1.0-0'}}

+
+
+

Sharpe Ratio

+

{{performance.sharpeRatio | number:'1.2-2'}}

+
+
+

Max Drawdown

+

{{performance.maxDrawdown | percent:'1.2-2'}}

+
+
+

Total Trades

+

{{performance.totalTrades}}

+
+
+

Sortino Ratio

+

{{performance.sortinoRatio | number:'1.2-2'}}

+
+
+ + + +
+ + + + +
+
+
+ + + +

Strategy Parameters

+
+
+

{{param.key}}

+

{{param.value}}

+
+
+
+ +
+

Backtest Results

+ + + + + + + + + + + + +
+ + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + +
TimeSymbolActionPriceQuantityConfidence
{{signal.timestamp | date:'short'}}{{signal.symbol}} + + {{signal.action}} + + ${{signal.price | number:'1.2-2'}}{{signal.quantity}}{{signal.confidence | percent:'1.0-0'}}
+
+ + + +
+
+ + + +
+ + + + + + + + + + + + + + + + + + + + + + +
SymbolEntryExitQuantityP&LP&L %
{{trade.symbol}} + ${{trade.entryPrice | number:'1.2-2'}} @ {{trade.entryTime | date:'short'}} + + ${{trade.exitPrice | number:'1.2-2'}} @ {{trade.exitTime | date:'short'}} + {{trade.quantity}} + ${{trade.pnl | number:'1.2-2'}} + + {{trade.pnlPercent | number:'1.2-2'}}% +
+
+ + + +
+
+
+
+
+ + +
+ psychology +

No strategy selected

+
+
diff --git a/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.ts b/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.ts index b9bff5e..b42716d 100644 --- a/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.ts +++ b/apps/dashboard/src/app/pages/strategies/strategy-details/strategy-details.component.ts @@ -1,381 +1,381 @@ -import { Component, Input, OnChanges, SimpleChanges } from '@angular/core'; -import { CommonModule } from '@angular/common'; -import { MatCardModule } from '@angular/material/card'; -import { MatTabsModule } from '@angular/material/tabs'; -import { MatIconModule } from '@angular/material/icon'; -import { MatButtonModule } from '@angular/material/button'; -import { MatTableModule } from '@angular/material/table'; -import { MatChipsModule } from '@angular/material/chips'; -import { MatProgressBarModule } from '@angular/material/progress-bar'; -import { MatDividerModule } from '@angular/material/divider'; -import { MatDialog } from '@angular/material/dialog'; -import { BacktestResult, TradingStrategy, StrategyService } from '../../../services/strategy.service'; -import { WebSocketService } from '../../../services/websocket.service'; -import { EquityChartComponent } from '../components/equity-chart.component'; -import { DrawdownChartComponent } from '../components/drawdown-chart.component'; -import { TradesTableComponent } from '../components/trades-table.component'; -import { PerformanceMetricsComponent } from '../components/performance-metrics.component'; -import { StrategyDialogComponent } from '../dialogs/strategy-dialog.component'; -import { BacktestDialogComponent } from '../dialogs/backtest-dialog.component'; - -@Component({ - selector: 'app-strategy-details', - standalone: true, - imports: [ - CommonModule, - MatCardModule, - MatTabsModule, - MatIconModule, - MatButtonModule, - MatTableModule, - MatChipsModule, - MatProgressBarModule, - 
MatDividerModule, - EquityChartComponent, - DrawdownChartComponent, - TradesTableComponent, - PerformanceMetricsComponent - ], - templateUrl: './strategy-details.component.html', - styleUrl: './strategy-details.component.css' -}) -export class StrategyDetailsComponent implements OnChanges { - @Input() strategy: TradingStrategy | null = null; - - signals: any[] = []; - trades: any[] = []; - performance: any = {}; - isLoadingSignals = false; - isLoadingTrades = false; - backtestResult: BacktestResult | undefined; - - constructor( - private strategyService: StrategyService, - private webSocketService: WebSocketService, - private dialog: MatDialog - ) {} - - ngOnChanges(changes: SimpleChanges): void { - if (changes['strategy'] && this.strategy) { - this.loadStrategyData(); - this.listenForUpdates(); - } - } - - loadStrategyData(): void { - if (!this.strategy) return; - - // In a real implementation, these would call API methods to fetch the data - this.loadSignals(); - this.loadTrades(); - this.loadPerformance(); - } - loadSignals(): void { - if (!this.strategy) return; - - this.isLoadingSignals = true; - - // First check if we can get real signals from the API - this.strategyService.getStrategySignals(this.strategy.id) - .subscribe({ - next: (response) => { - if (response.success && response.data && response.data.length > 0) { - this.signals = response.data; - } else { - // Fallback to mock data if no real signals available - this.signals = this.generateMockSignals(); - } - this.isLoadingSignals = false; - }, - error: (error) => { - console.error('Error loading signals', error); - // Fallback to mock data on error - this.signals = this.generateMockSignals(); - this.isLoadingSignals = false; - } - }); - } - - loadTrades(): void { - if (!this.strategy) return; - - this.isLoadingTrades = true; - - // First check if we can get real trades from the API - this.strategyService.getStrategyTrades(this.strategy.id) - .subscribe({ - next: (response) => { - if (response.success 
&& response.data && response.data.length > 0) { - this.trades = response.data; - } else { - // Fallback to mock data if no real trades available - this.trades = this.generateMockTrades(); - } - this.isLoadingTrades = false; - }, - error: (error) => { - console.error('Error loading trades', error); - // Fallback to mock data on error - this.trades = this.generateMockTrades(); - this.isLoadingTrades = false; - } - }); - } - - loadPerformance(): void { - // This would be an API call in a real implementation - this.performance = { - totalReturn: this.strategy?.performance.totalReturn || 0, - winRate: this.strategy?.performance.winRate || 0, - sharpeRatio: this.strategy?.performance.sharpeRatio || 0, - maxDrawdown: this.strategy?.performance.maxDrawdown || 0, - totalTrades: this.strategy?.performance.totalTrades || 0, - // Additional metrics that would come from the API - dailyReturn: 0.0012, - volatility: 0.008, - sortinoRatio: 1.2, - calmarRatio: 0.7 - }; - } - listenForUpdates(): void { - if (!this.strategy) return; - - // Subscribe to strategy signals - this.webSocketService.getStrategySignals(this.strategy.id) - .subscribe((signal: any) => { - // Add the new signal to the top of the list - this.signals = [signal, ...this.signals.slice(0, 9)]; // Keep only the latest 10 signals - }); - - // Subscribe to strategy trades - this.webSocketService.getStrategyTrades(this.strategy.id) - .subscribe((trade: any) => { - // Add the new trade to the top of the list - this.trades = [trade, ...this.trades.slice(0, 9)]; // Keep only the latest 10 trades - - // Update performance metrics - this.updatePerformanceMetrics(); - }); - - // Subscribe to strategy status updates - this.webSocketService.getStrategyUpdates() - .subscribe((update: any) => { - if (update.strategyId === this.strategy?.id) { - // Update strategy status if changed - if (update.status && this.strategy && this.strategy.status !== update.status) { - this.strategy.status = update.status; - } - - // Update other 
fields if present - if (update.performance && this.strategy) { - this.strategy.performance = { - ...this.strategy.performance, - ...update.performance - }; - this.performance = { - ...this.performance, - ...update.performance - }; - } - } - }); - - console.log('WebSocket listeners for strategy updates initialized'); - } - - /** - * Update performance metrics when new trades come in - */ - private updatePerformanceMetrics(): void { - if (!this.strategy || this.trades.length === 0) return; - - // Calculate basic metrics - const winningTrades = this.trades.filter(t => t.pnl > 0); - const losingTrades = this.trades.filter(t => t.pnl < 0); - - const totalPnl = this.trades.reduce((sum, trade) => sum + trade.pnl, 0); - const winRate = winningTrades.length / this.trades.length; - - // Update performance data - const currentPerformance = this.performance || {}; - this.performance = { - ...currentPerformance, - totalTrades: this.trades.length, - winRate: winRate, - totalReturn: (currentPerformance.totalReturn || 0) + (totalPnl / 10000) // Approximate - }; - - // Update strategy performance as well - if (this.strategy && this.strategy.performance) { - this.strategy.performance = { - ...this.strategy.performance, - totalTrades: this.trades.length, - winRate: winRate - }; - } - } - - getStatusColor(status: string): string { - switch (status) { - case 'ACTIVE': return 'green'; - case 'PAUSED': return 'orange'; - case 'ERROR': return 'red'; - default: return 'gray'; - } - } - - getSignalColor(action: string): string { - switch (action) { - case 'BUY': return 'green'; - case 'SELL': return 'red'; - default: return 'gray'; - } - } - - /** - * Open the backtest dialog to run a backtest for this strategy - */ - openBacktestDialog(): void { - if (!this.strategy) return; - - const dialogRef = this.dialog.open(BacktestDialogComponent, { - width: '800px', - data: this.strategy - }); - - dialogRef.afterClosed().subscribe(result => { - if (result) { - // Store the backtest result for 
visualization - this.backtestResult = result; - } - }); - } - - /** - * Open the strategy edit dialog - */ - openEditDialog(): void { - if (!this.strategy) return; - - const dialogRef = this.dialog.open(StrategyDialogComponent, { - width: '600px', - data: this.strategy - }); - - dialogRef.afterClosed().subscribe(result => { - if (result) { - // Refresh strategy data after edit - this.loadStrategyData(); - } - }); - } - - /** - * Start the strategy - */ - activateStrategy(): void { - if (!this.strategy) return; - - this.strategyService.startStrategy(this.strategy.id).subscribe({ - next: (response) => { - if (response.success) { - this.strategy!.status = 'ACTIVE'; - } - }, - error: (error) => { - console.error('Error starting strategy:', error); - } - }); - } - - /** - * Pause the strategy - */ - pauseStrategy(): void { - if (!this.strategy) return; - - this.strategyService.pauseStrategy(this.strategy.id).subscribe({ - next: (response) => { - if (response.success) { - this.strategy!.status = 'PAUSED'; - } - }, - error: (error) => { - console.error('Error pausing strategy:', error); - } - }); - } - - /** - * Stop the strategy - */ - stopStrategy(): void { - if (!this.strategy) return; - - this.strategyService.stopStrategy(this.strategy.id).subscribe({ - next: (response) => { - if (response.success) { - this.strategy!.status = 'INACTIVE'; - } - }, - error: (error) => { - console.error('Error stopping strategy:', error); - } - }); - } - - // Methods to generate mock data - private generateMockSignals(): any[] { - if (!this.strategy) return []; - - const signals = []; - const actions = ['BUY', 'SELL', 'HOLD']; - const now = new Date(); - - for (let i = 0; i < 10; i++) { - const symbol = this.strategy.symbols[Math.floor(Math.random() * this.strategy.symbols.length)]; - const action = actions[Math.floor(Math.random() * actions.length)]; - - signals.push({ - id: `sig_${i}`, - symbol, - action, - confidence: 0.7 + Math.random() * 0.3, - price: 100 + Math.random() * 50, - 
timestamp: new Date(now.getTime() - i * 1000 * 60 * 30), // 30 min intervals - quantity: Math.floor(10 + Math.random() * 90) - }); - } - - return signals; - } - - private generateMockTrades(): any[] { - if (!this.strategy) return []; - - const trades = []; - const now = new Date(); - - for (let i = 0; i < 10; i++) { - const symbol = this.strategy.symbols[Math.floor(Math.random() * this.strategy.symbols.length)]; - const entryPrice = 100 + Math.random() * 50; - const exitPrice = entryPrice * (1 + (Math.random() * 0.1 - 0.05)); // -5% to +5% - const quantity = Math.floor(10 + Math.random() * 90); - const pnl = (exitPrice - entryPrice) * quantity; - - trades.push({ - id: `trade_${i}`, - symbol, - entryPrice, - entryTime: new Date(now.getTime() - (i + 5) * 1000 * 60 * 60), // Hourly intervals - exitPrice, - exitTime: new Date(now.getTime() - i * 1000 * 60 * 60), - quantity, - pnl, - pnlPercent: ((exitPrice - entryPrice) / entryPrice) * 100 - }); - } - - return trades; - } -} +import { Component, Input, OnChanges, SimpleChanges } from '@angular/core'; +import { CommonModule } from '@angular/common'; +import { MatCardModule } from '@angular/material/card'; +import { MatTabsModule } from '@angular/material/tabs'; +import { MatIconModule } from '@angular/material/icon'; +import { MatButtonModule } from '@angular/material/button'; +import { MatTableModule } from '@angular/material/table'; +import { MatChipsModule } from '@angular/material/chips'; +import { MatProgressBarModule } from '@angular/material/progress-bar'; +import { MatDividerModule } from '@angular/material/divider'; +import { MatDialog } from '@angular/material/dialog'; +import { BacktestResult, TradingStrategy, StrategyService } from '../../../services/strategy.service'; +import { WebSocketService } from '../../../services/websocket.service'; +import { EquityChartComponent } from '../components/equity-chart.component'; +import { DrawdownChartComponent } from '../components/drawdown-chart.component'; +import { 
TradesTableComponent } from '../components/trades-table.component'; +import { PerformanceMetricsComponent } from '../components/performance-metrics.component'; +import { StrategyDialogComponent } from '../dialogs/strategy-dialog.component'; +import { BacktestDialogComponent } from '../dialogs/backtest-dialog.component'; + +@Component({ + selector: 'app-strategy-details', + standalone: true, + imports: [ + CommonModule, + MatCardModule, + MatTabsModule, + MatIconModule, + MatButtonModule, + MatTableModule, + MatChipsModule, + MatProgressBarModule, + MatDividerModule, + EquityChartComponent, + DrawdownChartComponent, + TradesTableComponent, + PerformanceMetricsComponent + ], + templateUrl: './strategy-details.component.html', + styleUrl: './strategy-details.component.css' +}) +export class StrategyDetailsComponent implements OnChanges { + @Input() strategy: TradingStrategy | null = null; + + signals: any[] = []; + trades: any[] = []; + performance: any = {}; + isLoadingSignals = false; + isLoadingTrades = false; + backtestResult: BacktestResult | undefined; + + constructor( + private strategyService: StrategyService, + private webSocketService: WebSocketService, + private dialog: MatDialog + ) {} + + ngOnChanges(changes: SimpleChanges): void { + if (changes['strategy'] && this.strategy) { + this.loadStrategyData(); + this.listenForUpdates(); + } + } + + loadStrategyData(): void { + if (!this.strategy) return; + + // In a real implementation, these would call API methods to fetch the data + this.loadSignals(); + this.loadTrades(); + this.loadPerformance(); + } + loadSignals(): void { + if (!this.strategy) return; + + this.isLoadingSignals = true; + + // First check if we can get real signals from the API + this.strategyService.getStrategySignals(this.strategy.id) + .subscribe({ + next: (response) => { + if (response.success && response.data && response.data.length > 0) { + this.signals = response.data; + } else { + // Fallback to mock data if no real signals available 
+ this.signals = this.generateMockSignals(); + } + this.isLoadingSignals = false; + }, + error: (error) => { + console.error('Error loading signals', error); + // Fallback to mock data on error + this.signals = this.generateMockSignals(); + this.isLoadingSignals = false; + } + }); + } + + loadTrades(): void { + if (!this.strategy) return; + + this.isLoadingTrades = true; + + // First check if we can get real trades from the API + this.strategyService.getStrategyTrades(this.strategy.id) + .subscribe({ + next: (response) => { + if (response.success && response.data && response.data.length > 0) { + this.trades = response.data; + } else { + // Fallback to mock data if no real trades available + this.trades = this.generateMockTrades(); + } + this.isLoadingTrades = false; + }, + error: (error) => { + console.error('Error loading trades', error); + // Fallback to mock data on error + this.trades = this.generateMockTrades(); + this.isLoadingTrades = false; + } + }); + } + + loadPerformance(): void { + // This would be an API call in a real implementation + this.performance = { + totalReturn: this.strategy?.performance.totalReturn || 0, + winRate: this.strategy?.performance.winRate || 0, + sharpeRatio: this.strategy?.performance.sharpeRatio || 0, + maxDrawdown: this.strategy?.performance.maxDrawdown || 0, + totalTrades: this.strategy?.performance.totalTrades || 0, + // Additional metrics that would come from the API + dailyReturn: 0.0012, + volatility: 0.008, + sortinoRatio: 1.2, + calmarRatio: 0.7 + }; + } + listenForUpdates(): void { + if (!this.strategy) return; + + // Subscribe to strategy signals + this.webSocketService.getStrategySignals(this.strategy.id) + .subscribe((signal: any) => { + // Add the new signal to the top of the list + this.signals = [signal, ...this.signals.slice(0, 9)]; // Keep only the latest 10 signals + }); + + // Subscribe to strategy trades + this.webSocketService.getStrategyTrades(this.strategy.id) + .subscribe((trade: any) => { + // Add the 
new trade to the top of the list + this.trades = [trade, ...this.trades.slice(0, 9)]; // Keep only the latest 10 trades + + // Update performance metrics + this.updatePerformanceMetrics(); + }); + + // Subscribe to strategy status updates + this.webSocketService.getStrategyUpdates() + .subscribe((update: any) => { + if (update.strategyId === this.strategy?.id) { + // Update strategy status if changed + if (update.status && this.strategy && this.strategy.status !== update.status) { + this.strategy.status = update.status; + } + + // Update other fields if present + if (update.performance && this.strategy) { + this.strategy.performance = { + ...this.strategy.performance, + ...update.performance + }; + this.performance = { + ...this.performance, + ...update.performance + }; + } + } + }); + + console.log('WebSocket listeners for strategy updates initialized'); + } + + /** + * Update performance metrics when new trades come in + */ + private updatePerformanceMetrics(): void { + if (!this.strategy || this.trades.length === 0) return; + + // Calculate basic metrics + const winningTrades = this.trades.filter(t => t.pnl > 0); + const losingTrades = this.trades.filter(t => t.pnl < 0); + + const totalPnl = this.trades.reduce((sum, trade) => sum + trade.pnl, 0); + const winRate = winningTrades.length / this.trades.length; + + // Update performance data + const currentPerformance = this.performance || {}; + this.performance = { + ...currentPerformance, + totalTrades: this.trades.length, + winRate: winRate, + totalReturn: (currentPerformance.totalReturn || 0) + (totalPnl / 10000) // Approximate + }; + + // Update strategy performance as well + if (this.strategy && this.strategy.performance) { + this.strategy.performance = { + ...this.strategy.performance, + totalTrades: this.trades.length, + winRate: winRate + }; + } + } + + getStatusColor(status: string): string { + switch (status) { + case 'ACTIVE': return 'green'; + case 'PAUSED': return 'orange'; + case 'ERROR': return 'red'; 
+ default: return 'gray'; + } + } + + getSignalColor(action: string): string { + switch (action) { + case 'BUY': return 'green'; + case 'SELL': return 'red'; + default: return 'gray'; + } + } + + /** + * Open the backtest dialog to run a backtest for this strategy + */ + openBacktestDialog(): void { + if (!this.strategy) return; + + const dialogRef = this.dialog.open(BacktestDialogComponent, { + width: '800px', + data: this.strategy + }); + + dialogRef.afterClosed().subscribe(result => { + if (result) { + // Store the backtest result for visualization + this.backtestResult = result; + } + }); + } + + /** + * Open the strategy edit dialog + */ + openEditDialog(): void { + if (!this.strategy) return; + + const dialogRef = this.dialog.open(StrategyDialogComponent, { + width: '600px', + data: this.strategy + }); + + dialogRef.afterClosed().subscribe(result => { + if (result) { + // Refresh strategy data after edit + this.loadStrategyData(); + } + }); + } + + /** + * Start the strategy + */ + activateStrategy(): void { + if (!this.strategy) return; + + this.strategyService.startStrategy(this.strategy.id).subscribe({ + next: (response) => { + if (response.success) { + this.strategy!.status = 'ACTIVE'; + } + }, + error: (error) => { + console.error('Error starting strategy:', error); + } + }); + } + + /** + * Pause the strategy + */ + pauseStrategy(): void { + if (!this.strategy) return; + + this.strategyService.pauseStrategy(this.strategy.id).subscribe({ + next: (response) => { + if (response.success) { + this.strategy!.status = 'PAUSED'; + } + }, + error: (error) => { + console.error('Error pausing strategy:', error); + } + }); + } + + /** + * Stop the strategy + */ + stopStrategy(): void { + if (!this.strategy) return; + + this.strategyService.stopStrategy(this.strategy.id).subscribe({ + next: (response) => { + if (response.success) { + this.strategy!.status = 'INACTIVE'; + } + }, + error: (error) => { + console.error('Error stopping strategy:', error); + } + }); + } + 
+ // Methods to generate mock data + private generateMockSignals(): any[] { + if (!this.strategy) return []; + + const signals = []; + const actions = ['BUY', 'SELL', 'HOLD']; + const now = new Date(); + + for (let i = 0; i < 10; i++) { + const symbol = this.strategy.symbols[Math.floor(Math.random() * this.strategy.symbols.length)]; + const action = actions[Math.floor(Math.random() * actions.length)]; + + signals.push({ + id: `sig_${i}`, + symbol, + action, + confidence: 0.7 + Math.random() * 0.3, + price: 100 + Math.random() * 50, + timestamp: new Date(now.getTime() - i * 1000 * 60 * 30), // 30 min intervals + quantity: Math.floor(10 + Math.random() * 90) + }); + } + + return signals; + } + + private generateMockTrades(): any[] { + if (!this.strategy) return []; + + const trades = []; + const now = new Date(); + + for (let i = 0; i < 10; i++) { + const symbol = this.strategy.symbols[Math.floor(Math.random() * this.strategy.symbols.length)]; + const entryPrice = 100 + Math.random() * 50; + const exitPrice = entryPrice * (1 + (Math.random() * 0.1 - 0.05)); // -5% to +5% + const quantity = Math.floor(10 + Math.random() * 90); + const pnl = (exitPrice - entryPrice) * quantity; + + trades.push({ + id: `trade_${i}`, + symbol, + entryPrice, + entryTime: new Date(now.getTime() - (i + 5) * 1000 * 60 * 60), // Hourly intervals + exitPrice, + exitTime: new Date(now.getTime() - i * 1000 * 60 * 60), + quantity, + pnl, + pnlPercent: ((exitPrice - entryPrice) / entryPrice) * 100 + }); + } + + return trades; + } +} diff --git a/apps/dashboard/src/app/services/api.service.ts b/apps/dashboard/src/app/services/api.service.ts index e6ab258..f40796f 100644 --- a/apps/dashboard/src/app/services/api.service.ts +++ b/apps/dashboard/src/app/services/api.service.ts @@ -1,98 +1,98 @@ -import { Injectable } from '@angular/core'; -import { HttpClient } from '@angular/common/http'; -import { Observable } from 'rxjs'; - -export interface RiskThresholds { - maxPositionSize: number; - 
maxDailyLoss: number; - maxPortfolioRisk: number; - volatilityLimit: number; -} - -export interface RiskEvaluation { - symbol: string; - positionValue: number; - positionRisk: number; - violations: string[]; - riskLevel: 'LOW' | 'MEDIUM' | 'HIGH'; -} - -export interface MarketData { - symbol: string; - price: number; - change: number; - changePercent: number; - volume: number; - timestamp: string; -} - -@Injectable({ - providedIn: 'root' -}) -export class ApiService { - private readonly baseUrls = { - riskGuardian: 'http://localhost:3002', - strategyOrchestrator: 'http://localhost:3003', - marketDataGateway: 'http://localhost:3001' - }; - - constructor(private http: HttpClient) {} - - // Risk Guardian API - getRiskThresholds(): Observable<{ success: boolean; data: RiskThresholds }> { - return this.http.get<{ success: boolean; data: RiskThresholds }>( - `${this.baseUrls.riskGuardian}/api/risk/thresholds` - ); - } - - updateRiskThresholds(thresholds: RiskThresholds): Observable<{ success: boolean; data: RiskThresholds }> { - return this.http.put<{ success: boolean; data: RiskThresholds }>( - `${this.baseUrls.riskGuardian}/api/risk/thresholds`, - thresholds - ); - } - - evaluateRisk(params: { - symbol: string; - quantity: number; - price: number; - portfolioValue: number; - }): Observable<{ success: boolean; data: RiskEvaluation }> { - return this.http.post<{ success: boolean; data: RiskEvaluation }>( - `${this.baseUrls.riskGuardian}/api/risk/evaluate`, - params - ); - } - - getRiskHistory(): Observable<{ success: boolean; data: RiskEvaluation[] }> { - return this.http.get<{ success: boolean; data: RiskEvaluation[] }>( - `${this.baseUrls.riskGuardian}/api/risk/history` - ); - } - - // Strategy Orchestrator API - getStrategies(): Observable<{ success: boolean; data: any[] }> { - return this.http.get<{ success: boolean; data: any[] }>( - `${this.baseUrls.strategyOrchestrator}/api/strategies` - ); - } - - createStrategy(strategy: any): Observable<{ success: boolean; 
data: any }> { - return this.http.post<{ success: boolean; data: any }>( - `${this.baseUrls.strategyOrchestrator}/api/strategies`, - strategy - ); - } - // Market Data Gateway API - getMarketData(symbols: string[] = ['AAPL', 'GOOGL', 'MSFT', 'TSLA', 'AMZN']): Observable<{ success: boolean; data: MarketData[] }> { - const symbolsParam = symbols.join(','); - return this.http.get<{ success: boolean; data: MarketData[] }>( - `${this.baseUrls.marketDataGateway}/api/market-data?symbols=${symbolsParam}` - ); - } - - // Health checks - checkServiceHealth(service: 'riskGuardian' | 'strategyOrchestrator' | 'marketDataGateway'): Observable { - return this.http.get(`${this.baseUrls[service]}/health`); - } -} +import { Injectable } from '@angular/core'; +import { HttpClient } from '@angular/common/http'; +import { Observable } from 'rxjs'; + +export interface RiskThresholds { + maxPositionSize: number; + maxDailyLoss: number; + maxPortfolioRisk: number; + volatilityLimit: number; +} + +export interface RiskEvaluation { + symbol: string; + positionValue: number; + positionRisk: number; + violations: string[]; + riskLevel: 'LOW' | 'MEDIUM' | 'HIGH'; +} + +export interface MarketData { + symbol: string; + price: number; + change: number; + changePercent: number; + volume: number; + timestamp: string; +} + +@Injectable({ + providedIn: 'root' +}) +export class ApiService { + private readonly baseUrls = { + riskGuardian: 'http://localhost:3002', + strategyOrchestrator: 'http://localhost:3003', + marketDataGateway: 'http://localhost:3001' + }; + + constructor(private http: HttpClient) {} + + // Risk Guardian API + getRiskThresholds(): Observable<{ success: boolean; data: RiskThresholds }> { + return this.http.get<{ success: boolean; data: RiskThresholds }>( + `${this.baseUrls.riskGuardian}/api/risk/thresholds` + ); + } + + updateRiskThresholds(thresholds: RiskThresholds): Observable<{ success: boolean; data: RiskThresholds }> { + return this.http.put<{ success: boolean; data: 
RiskThresholds }>( + `${this.baseUrls.riskGuardian}/api/risk/thresholds`, + thresholds + ); + } + + evaluateRisk(params: { + symbol: string; + quantity: number; + price: number; + portfolioValue: number; + }): Observable<{ success: boolean; data: RiskEvaluation }> { + return this.http.post<{ success: boolean; data: RiskEvaluation }>( + `${this.baseUrls.riskGuardian}/api/risk/evaluate`, + params + ); + } + + getRiskHistory(): Observable<{ success: boolean; data: RiskEvaluation[] }> { + return this.http.get<{ success: boolean; data: RiskEvaluation[] }>( + `${this.baseUrls.riskGuardian}/api/risk/history` + ); + } + + // Strategy Orchestrator API + getStrategies(): Observable<{ success: boolean; data: any[] }> { + return this.http.get<{ success: boolean; data: any[] }>( + `${this.baseUrls.strategyOrchestrator}/api/strategies` + ); + } + + createStrategy(strategy: any): Observable<{ success: boolean; data: any }> { + return this.http.post<{ success: boolean; data: any }>( + `${this.baseUrls.strategyOrchestrator}/api/strategies`, + strategy + ); + } + // Market Data Gateway API + getMarketData(symbols: string[] = ['AAPL', 'GOOGL', 'MSFT', 'TSLA', 'AMZN']): Observable<{ success: boolean; data: MarketData[] }> { + const symbolsParam = symbols.join(','); + return this.http.get<{ success: boolean; data: MarketData[] }>( + `${this.baseUrls.marketDataGateway}/api/market-data?symbols=${symbolsParam}` + ); + } + + // Health checks + checkServiceHealth(service: 'riskGuardian' | 'strategyOrchestrator' | 'marketDataGateway'): Observable { + return this.http.get(`${this.baseUrls[service]}/health`); + } +} diff --git a/apps/dashboard/src/app/services/notification.service.ts b/apps/dashboard/src/app/services/notification.service.ts index 9e1a1ef..0e45708 100644 --- a/apps/dashboard/src/app/services/notification.service.ts +++ b/apps/dashboard/src/app/services/notification.service.ts @@ -1,193 +1,193 @@ -import { Injectable, signal, inject } from '@angular/core'; -import { MatSnackBar 
} from '@angular/material/snack-bar'; -import { WebSocketService, RiskAlert } from './websocket.service'; -import { Subscription } from 'rxjs'; - -export interface Notification { - id: string; - type: 'info' | 'warning' | 'error' | 'success'; - title: string; - message: string; - timestamp: Date; - read: boolean; -} - -@Injectable({ - providedIn: 'root' -}) -export class NotificationService { - private snackBar = inject(MatSnackBar); - private webSocketService = inject(WebSocketService); - private riskAlertsSubscription?: Subscription; - - // Reactive state - public notifications = signal([]); - public unreadCount = signal(0); - - constructor() { - this.initializeRiskAlerts(); - } - - private initializeRiskAlerts() { - // Subscribe to risk alerts from WebSocket - this.riskAlertsSubscription = this.webSocketService.getRiskAlerts().subscribe({ - next: (alert: RiskAlert) => { - this.handleRiskAlert(alert); - }, - error: (err) => { - console.error('Risk alert subscription error:', err); - } - }); - } - - private handleRiskAlert(alert: RiskAlert) { - const notification: Notification = { - id: alert.id, - type: this.mapSeverityToType(alert.severity), - title: `Risk Alert: ${alert.symbol}`, - message: alert.message, - timestamp: new Date(alert.timestamp), - read: false - }; - - this.addNotification(notification); - this.showSnackBarAlert(notification); - } - - private mapSeverityToType(severity: string): 'info' | 'warning' | 'error' | 'success' { - switch (severity) { - case 'HIGH': return 'error'; - case 'MEDIUM': return 'warning'; - case 'LOW': return 'info'; - default: return 'info'; - } - } - - private showSnackBarAlert(notification: Notification) { - const actionText = notification.type === 'error' ? 'Review' : 'Dismiss'; - const duration = notification.type === 'error' ? 
10000 : 5000; - - this.snackBar.open( - `${notification.title}: ${notification.message}`, - actionText, - { - duration, - panelClass: [`snack-${notification.type}`] - } - ); - } - - // Public methods - addNotification(notification: Notification) { - const current = this.notifications(); - const updated = [notification, ...current].slice(0, 50); // Keep only latest 50 - this.notifications.set(updated); - this.updateUnreadCount(); - } - - markAsRead(notificationId: string) { - const current = this.notifications(); - const updated = current.map(n => - n.id === notificationId ? { ...n, read: true } : n - ); - this.notifications.set(updated); - this.updateUnreadCount(); - } - - markAllAsRead() { - const current = this.notifications(); - const updated = current.map(n => ({ ...n, read: true })); - this.notifications.set(updated); - this.updateUnreadCount(); - } - - clearNotification(notificationId: string) { - const current = this.notifications(); - const updated = current.filter(n => n.id !== notificationId); - this.notifications.set(updated); - this.updateUnreadCount(); - } - - clearAllNotifications() { - this.notifications.set([]); - this.unreadCount.set(0); - } - - private updateUnreadCount() { - const unread = this.notifications().filter(n => !n.read).length; - this.unreadCount.set(unread); - } - - // Manual notification methods - showSuccess(title: string, message: string) { - const notification: Notification = { - id: this.generateId(), - type: 'success', - title, - message, - timestamp: new Date(), - read: false - }; - this.addNotification(notification); - this.snackBar.open(`${title}: ${message}`, 'Dismiss', { - duration: 3000, - panelClass: ['snack-success'] - }); - } - - showError(title: string, message: string) { - const notification: Notification = { - id: this.generateId(), - type: 'error', - title, - message, - timestamp: new Date(), - read: false - }; - this.addNotification(notification); - this.snackBar.open(`${title}: ${message}`, 'Dismiss', { - 
duration: 8000, - panelClass: ['snack-error'] - }); - } - - showWarning(title: string, message: string) { - const notification: Notification = { - id: this.generateId(), - type: 'warning', - title, - message, - timestamp: new Date(), - read: false - }; - this.addNotification(notification); - this.snackBar.open(`${title}: ${message}`, 'Dismiss', { - duration: 5000, - panelClass: ['snack-warning'] - }); - } - - showInfo(title: string, message: string) { - const notification: Notification = { - id: this.generateId(), - type: 'info', - title, - message, - timestamp: new Date(), - read: false - }; - this.addNotification(notification); - this.snackBar.open(`${title}: ${message}`, 'Dismiss', { - duration: 4000, - panelClass: ['snack-info'] - }); - } - - private generateId(): string { - return Date.now().toString(36) + Math.random().toString(36).substr(2); - } - - ngOnDestroy() { - this.riskAlertsSubscription?.unsubscribe(); - } -} +import { Injectable, signal, inject } from '@angular/core'; +import { MatSnackBar } from '@angular/material/snack-bar'; +import { WebSocketService, RiskAlert } from './websocket.service'; +import { Subscription } from 'rxjs'; + +export interface Notification { + id: string; + type: 'info' | 'warning' | 'error' | 'success'; + title: string; + message: string; + timestamp: Date; + read: boolean; +} + +@Injectable({ + providedIn: 'root' +}) +export class NotificationService { + private snackBar = inject(MatSnackBar); + private webSocketService = inject(WebSocketService); + private riskAlertsSubscription?: Subscription; + + // Reactive state + public notifications = signal([]); + public unreadCount = signal(0); + + constructor() { + this.initializeRiskAlerts(); + } + + private initializeRiskAlerts() { + // Subscribe to risk alerts from WebSocket + this.riskAlertsSubscription = this.webSocketService.getRiskAlerts().subscribe({ + next: (alert: RiskAlert) => { + this.handleRiskAlert(alert); + }, + error: (err) => { + console.error('Risk alert 
subscription error:', err); + } + }); + } + + private handleRiskAlert(alert: RiskAlert) { + const notification: Notification = { + id: alert.id, + type: this.mapSeverityToType(alert.severity), + title: `Risk Alert: ${alert.symbol}`, + message: alert.message, + timestamp: new Date(alert.timestamp), + read: false + }; + + this.addNotification(notification); + this.showSnackBarAlert(notification); + } + + private mapSeverityToType(severity: string): 'info' | 'warning' | 'error' | 'success' { + switch (severity) { + case 'HIGH': return 'error'; + case 'MEDIUM': return 'warning'; + case 'LOW': return 'info'; + default: return 'info'; + } + } + + private showSnackBarAlert(notification: Notification) { + const actionText = notification.type === 'error' ? 'Review' : 'Dismiss'; + const duration = notification.type === 'error' ? 10000 : 5000; + + this.snackBar.open( + `${notification.title}: ${notification.message}`, + actionText, + { + duration, + panelClass: [`snack-${notification.type}`] + } + ); + } + + // Public methods + addNotification(notification: Notification) { + const current = this.notifications(); + const updated = [notification, ...current].slice(0, 50); // Keep only latest 50 + this.notifications.set(updated); + this.updateUnreadCount(); + } + + markAsRead(notificationId: string) { + const current = this.notifications(); + const updated = current.map(n => + n.id === notificationId ? 
{ ...n, read: true } : n + ); + this.notifications.set(updated); + this.updateUnreadCount(); + } + + markAllAsRead() { + const current = this.notifications(); + const updated = current.map(n => ({ ...n, read: true })); + this.notifications.set(updated); + this.updateUnreadCount(); + } + + clearNotification(notificationId: string) { + const current = this.notifications(); + const updated = current.filter(n => n.id !== notificationId); + this.notifications.set(updated); + this.updateUnreadCount(); + } + + clearAllNotifications() { + this.notifications.set([]); + this.unreadCount.set(0); + } + + private updateUnreadCount() { + const unread = this.notifications().filter(n => !n.read).length; + this.unreadCount.set(unread); + } + + // Manual notification methods + showSuccess(title: string, message: string) { + const notification: Notification = { + id: this.generateId(), + type: 'success', + title, + message, + timestamp: new Date(), + read: false + }; + this.addNotification(notification); + this.snackBar.open(`${title}: ${message}`, 'Dismiss', { + duration: 3000, + panelClass: ['snack-success'] + }); + } + + showError(title: string, message: string) { + const notification: Notification = { + id: this.generateId(), + type: 'error', + title, + message, + timestamp: new Date(), + read: false + }; + this.addNotification(notification); + this.snackBar.open(`${title}: ${message}`, 'Dismiss', { + duration: 8000, + panelClass: ['snack-error'] + }); + } + + showWarning(title: string, message: string) { + const notification: Notification = { + id: this.generateId(), + type: 'warning', + title, + message, + timestamp: new Date(), + read: false + }; + this.addNotification(notification); + this.snackBar.open(`${title}: ${message}`, 'Dismiss', { + duration: 5000, + panelClass: ['snack-warning'] + }); + } + + showInfo(title: string, message: string) { + const notification: Notification = { + id: this.generateId(), + type: 'info', + title, + message, + timestamp: new Date(), + read: 
false + }; + this.addNotification(notification); + this.snackBar.open(`${title}: ${message}`, 'Dismiss', { + duration: 4000, + panelClass: ['snack-info'] + }); + } + + private generateId(): string { + return Date.now().toString(36) + Math.random().toString(36).substr(2); + } + + ngOnDestroy() { + this.riskAlertsSubscription?.unsubscribe(); + } +} diff --git a/apps/dashboard/src/app/services/strategy.service.ts b/apps/dashboard/src/app/services/strategy.service.ts index 74f0704..dedd9b7 100644 --- a/apps/dashboard/src/app/services/strategy.service.ts +++ b/apps/dashboard/src/app/services/strategy.service.ts @@ -1,209 +1,209 @@ -import { Injectable } from '@angular/core'; -import { HttpClient } from '@angular/common/http'; -import { Observable } from 'rxjs'; - -export interface TradingStrategy { - id: string; - name: string; - description: string; - status: 'ACTIVE' | 'INACTIVE' | 'PAUSED' | 'ERROR'; - type: string; - symbols: string[]; - parameters: Record; - performance: { - totalTrades: number; - winRate: number; - totalReturn: number; - sharpeRatio: number; - maxDrawdown: number; - }; - createdAt: Date; - updatedAt: Date; -} - -export interface BacktestRequest { - strategyType: string; - strategyParams: Record; - symbols: string[]; - startDate: Date | string; - endDate: Date | string; - initialCapital: number; - dataResolution: '1m' | '5m' | '15m' | '30m' | '1h' | '4h' | '1d'; - commission: number; - slippage: number; - mode: 'event' | 'vector'; -} - -export interface BacktestResult { - strategyId: string; - startDate: Date; - endDate: Date; - duration: number; - initialCapital: number; - finalCapital: number; - totalReturn: number; - annualizedReturn: number; - sharpeRatio: number; - maxDrawdown: number; - maxDrawdownDuration: number; - winRate: number; - totalTrades: number; - winningTrades: number; - losingTrades: number; - averageWinningTrade: number; - averageLosingTrade: number; - profitFactor: number; - dailyReturns: Array<{ date: Date; return: number }>; 
- trades: Array<{ - symbol: string; - entryTime: Date; - entryPrice: number; - exitTime: Date; - exitPrice: number; - quantity: number; - pnl: number; - pnlPercent: number; - }>; - // Advanced metrics - sortinoRatio?: number; - calmarRatio?: number; - omegaRatio?: number; - cagr?: number; - volatility?: number; - ulcerIndex?: number; -} - -interface ApiResponse { - success: boolean; - data: T; - error?: string; -} - -@Injectable({ - providedIn: 'root' -}) -export class StrategyService { - private apiBaseUrl = '/api'; // Will be proxied to the correct backend endpoint - - constructor(private http: HttpClient) { } - - // Strategy Management - getStrategies(): Observable> { - return this.http.get>(`${this.apiBaseUrl}/strategies`); - } - - getStrategy(id: string): Observable> { - return this.http.get>(`${this.apiBaseUrl}/strategies/${id}`); - } - - createStrategy(strategy: Partial): Observable> { - return this.http.post>(`${this.apiBaseUrl}/strategies`, strategy); - } - - updateStrategy(id: string, updates: Partial): Observable> { - return this.http.put>(`${this.apiBaseUrl}/strategies/${id}`, updates); - } - - startStrategy(id: string): Observable> { - return this.http.post>(`${this.apiBaseUrl}/strategies/${id}/start`, {}); - } - - stopStrategy(id: string): Observable> { - return this.http.post>(`${this.apiBaseUrl}/strategies/${id}/stop`, {}); - } - - pauseStrategy(id: string): Observable> { - return this.http.post>(`${this.apiBaseUrl}/strategies/${id}/pause`, {}); - } - - // Backtest Management - getStrategyTypes(): Observable> { - return this.http.get>(`${this.apiBaseUrl}/strategy-types`); - } - - getStrategyParameters(type: string): Observable>> { - return this.http.get>>(`${this.apiBaseUrl}/strategy-parameters/${type}`); - } - - runBacktest(request: BacktestRequest): Observable> { - return this.http.post>(`${this.apiBaseUrl}/backtest`, request); - } - getBacktestResult(id: string): Observable> { - return this.http.get>(`${this.apiBaseUrl}/backtest/${id}`); - } - - 
optimizeStrategy( - baseRequest: BacktestRequest, - parameterGrid: Record - ): Observable }>>> { - return this.http.post }>>>( - `${this.apiBaseUrl}/backtest/optimize`, - { baseRequest, parameterGrid } - ); - } - - // Strategy Signals and Trades - getStrategySignals(strategyId: string): Observable>> { - return this.http.get>(`${this.apiBaseUrl}/strategies/${strategyId}/signals`); - } - - getStrategyTrades(strategyId: string): Observable>> { - return this.http.get>(`${this.apiBaseUrl}/strategies/${strategyId}/trades`); - } - - // Helper methods for common transformations - formatBacktestRequest(formData: any): BacktestRequest { - // Handle date formatting and parameter conversion - return { - ...formData, - startDate: formData.startDate instanceof Date ? formData.startDate.toISOString() : formData.startDate, - endDate: formData.endDate instanceof Date ? formData.endDate.toISOString() : formData.endDate, - strategyParams: this.convertParameterTypes(formData.strategyType, formData.strategyParams) - }; - } - - private convertParameterTypes(strategyType: string, params: Record): Record { - // Convert string parameters to correct types based on strategy requirements - const result: Record = {}; - - for (const [key, value] of Object.entries(params)) { - if (typeof value === 'string') { - // Try to convert to number if it looks like a number - if (!isNaN(Number(value))) { - result[key] = Number(value); - } else if (value.toLowerCase() === 'true') { - result[key] = true; - } else if (value.toLowerCase() === 'false') { - result[key] = false; - } else { - result[key] = value; - } - } else { - result[key] = value; - } - } - - return result; - } -} +import { Injectable } from '@angular/core'; +import { HttpClient } from '@angular/common/http'; +import { Observable } from 'rxjs'; + +export interface TradingStrategy { + id: string; + name: string; + description: string; + status: 'ACTIVE' | 'INACTIVE' | 'PAUSED' | 'ERROR'; + type: string; + symbols: string[]; + parameters: 
Record; + performance: { + totalTrades: number; + winRate: number; + totalReturn: number; + sharpeRatio: number; + maxDrawdown: number; + }; + createdAt: Date; + updatedAt: Date; +} + +export interface BacktestRequest { + strategyType: string; + strategyParams: Record; + symbols: string[]; + startDate: Date | string; + endDate: Date | string; + initialCapital: number; + dataResolution: '1m' | '5m' | '15m' | '30m' | '1h' | '4h' | '1d'; + commission: number; + slippage: number; + mode: 'event' | 'vector'; +} + +export interface BacktestResult { + strategyId: string; + startDate: Date; + endDate: Date; + duration: number; + initialCapital: number; + finalCapital: number; + totalReturn: number; + annualizedReturn: number; + sharpeRatio: number; + maxDrawdown: number; + maxDrawdownDuration: number; + winRate: number; + totalTrades: number; + winningTrades: number; + losingTrades: number; + averageWinningTrade: number; + averageLosingTrade: number; + profitFactor: number; + dailyReturns: Array<{ date: Date; return: number }>; + trades: Array<{ + symbol: string; + entryTime: Date; + entryPrice: number; + exitTime: Date; + exitPrice: number; + quantity: number; + pnl: number; + pnlPercent: number; + }>; + // Advanced metrics + sortinoRatio?: number; + calmarRatio?: number; + omegaRatio?: number; + cagr?: number; + volatility?: number; + ulcerIndex?: number; +} + +interface ApiResponse { + success: boolean; + data: T; + error?: string; +} + +@Injectable({ + providedIn: 'root' +}) +export class StrategyService { + private apiBaseUrl = '/api'; // Will be proxied to the correct backend endpoint + + constructor(private http: HttpClient) { } + + // Strategy Management + getStrategies(): Observable> { + return this.http.get>(`${this.apiBaseUrl}/strategies`); + } + + getStrategy(id: string): Observable> { + return this.http.get>(`${this.apiBaseUrl}/strategies/${id}`); + } + + createStrategy(strategy: Partial): Observable> { + return this.http.post>(`${this.apiBaseUrl}/strategies`, 
strategy); + } + + updateStrategy(id: string, updates: Partial): Observable> { + return this.http.put>(`${this.apiBaseUrl}/strategies/${id}`, updates); + } + + startStrategy(id: string): Observable> { + return this.http.post>(`${this.apiBaseUrl}/strategies/${id}/start`, {}); + } + + stopStrategy(id: string): Observable> { + return this.http.post>(`${this.apiBaseUrl}/strategies/${id}/stop`, {}); + } + + pauseStrategy(id: string): Observable> { + return this.http.post>(`${this.apiBaseUrl}/strategies/${id}/pause`, {}); + } + + // Backtest Management + getStrategyTypes(): Observable> { + return this.http.get>(`${this.apiBaseUrl}/strategy-types`); + } + + getStrategyParameters(type: string): Observable>> { + return this.http.get>>(`${this.apiBaseUrl}/strategy-parameters/${type}`); + } + + runBacktest(request: BacktestRequest): Observable> { + return this.http.post>(`${this.apiBaseUrl}/backtest`, request); + } + getBacktestResult(id: string): Observable> { + return this.http.get>(`${this.apiBaseUrl}/backtest/${id}`); + } + + optimizeStrategy( + baseRequest: BacktestRequest, + parameterGrid: Record + ): Observable }>>> { + return this.http.post }>>>( + `${this.apiBaseUrl}/backtest/optimize`, + { baseRequest, parameterGrid } + ); + } + + // Strategy Signals and Trades + getStrategySignals(strategyId: string): Observable>> { + return this.http.get>(`${this.apiBaseUrl}/strategies/${strategyId}/signals`); + } + + getStrategyTrades(strategyId: string): Observable>> { + return this.http.get>(`${this.apiBaseUrl}/strategies/${strategyId}/trades`); + } + + // Helper methods for common transformations + formatBacktestRequest(formData: any): BacktestRequest { + // Handle date formatting and parameter conversion + return { + ...formData, + startDate: formData.startDate instanceof Date ? formData.startDate.toISOString() : formData.startDate, + endDate: formData.endDate instanceof Date ? 
formData.endDate.toISOString() : formData.endDate, + strategyParams: this.convertParameterTypes(formData.strategyType, formData.strategyParams) + }; + } + + private convertParameterTypes(strategyType: string, params: Record): Record { + // Convert string parameters to correct types based on strategy requirements + const result: Record = {}; + + for (const [key, value] of Object.entries(params)) { + if (typeof value === 'string') { + // Try to convert to number if it looks like a number + if (!isNaN(Number(value))) { + result[key] = Number(value); + } else if (value.toLowerCase() === 'true') { + result[key] = true; + } else if (value.toLowerCase() === 'false') { + result[key] = false; + } else { + result[key] = value; + } + } else { + result[key] = value; + } + } + + return result; + } +} diff --git a/apps/dashboard/src/app/services/websocket.service.ts b/apps/dashboard/src/app/services/websocket.service.ts index 78be558..893e534 100644 --- a/apps/dashboard/src/app/services/websocket.service.ts +++ b/apps/dashboard/src/app/services/websocket.service.ts @@ -1,218 +1,218 @@ -import { Injectable, signal } from '@angular/core'; -import { BehaviorSubject, Observable, Subject } from 'rxjs'; -import { filter, map } from 'rxjs/operators'; - -export interface WebSocketMessage { - type: string; - data: any; - timestamp: string; -} - -export interface MarketDataUpdate { - symbol: string; - price: number; - change: number; - changePercent: number; - volume: number; - timestamp: string; -} - -export interface RiskAlert { - id: string; - symbol: string; - alertType: 'POSITION_LIMIT' | 'DAILY_LOSS' | 'VOLATILITY' | 'PORTFOLIO_RISK'; - message: string; - severity: 'LOW' | 'MEDIUM' | 'HIGH'; - timestamp: string; -} - -@Injectable({ - providedIn: 'root' -}) -export class WebSocketService { - private readonly WS_ENDPOINTS = { - marketData: 'ws://localhost:3001/ws', - riskGuardian: 'ws://localhost:3002/ws', - strategyOrchestrator: 'ws://localhost:3003/ws' - }; - - private connections = 
new Map(); - private messageSubjects = new Map>(); - - // Connection status signals - public isConnected = signal(false); - public connectionStatus = signal<{ [key: string]: boolean }>({ - marketData: false, - riskGuardian: false, - strategyOrchestrator: false - }); - - constructor() { - this.initializeConnections(); - } - - private initializeConnections() { - // Initialize WebSocket connections for all services - Object.entries(this.WS_ENDPOINTS).forEach(([service, url]) => { - this.connect(service, url); - }); - } - - private connect(serviceName: string, url: string) { - try { - const ws = new WebSocket(url); - const messageSubject = new Subject(); - - ws.onopen = () => { - console.log(`Connected to ${serviceName} WebSocket`); - this.updateConnectionStatus(serviceName, true); - }; - - ws.onmessage = (event) => { - try { - const message: WebSocketMessage = JSON.parse(event.data); - messageSubject.next(message); - } catch (error) { - console.error(`Failed to parse WebSocket message from ${serviceName}:`, error); - } - }; - - ws.onclose = () => { - console.log(`Disconnected from ${serviceName} WebSocket`); - this.updateConnectionStatus(serviceName, false); - - // Attempt to reconnect after 5 seconds - setTimeout(() => { - this.connect(serviceName, url); - }, 5000); - }; - - ws.onerror = (error) => { - console.error(`WebSocket error for ${serviceName}:`, error); - this.updateConnectionStatus(serviceName, false); - }; - - this.connections.set(serviceName, ws); - this.messageSubjects.set(serviceName, messageSubject); - - } catch (error) { - console.error(`Failed to connect to ${serviceName} WebSocket:`, error); - this.updateConnectionStatus(serviceName, false); - } - } - - private updateConnectionStatus(serviceName: string, isConnected: boolean) { - const currentStatus = this.connectionStatus(); - const newStatus = { ...currentStatus, [serviceName]: isConnected }; - this.connectionStatus.set(newStatus); - - // Update overall connection status - const overallConnected = 
Object.values(newStatus).some(status => status); - this.isConnected.set(overallConnected); - } - - // Market Data Updates - getMarketDataUpdates(): Observable { - const subject = this.messageSubjects.get('marketData'); - if (!subject) { - throw new Error('Market data WebSocket not initialized'); - } - - return subject.asObservable().pipe( - filter(message => message.type === 'market_data_update'), - map(message => message.data as MarketDataUpdate) - ); - } - - // Risk Alerts - getRiskAlerts(): Observable { - const subject = this.messageSubjects.get('riskGuardian'); - if (!subject) { - throw new Error('Risk Guardian WebSocket not initialized'); - } - - return subject.asObservable().pipe( - filter(message => message.type === 'risk_alert'), - map(message => message.data as RiskAlert) - ); - } - // Strategy Updates - getStrategyUpdates(): Observable { - const subject = this.messageSubjects.get('strategyOrchestrator'); - if (!subject) { - throw new Error('Strategy Orchestrator WebSocket not initialized'); - } - - return subject.asObservable().pipe( - filter(message => message.type === 'strategy_update'), - map(message => message.data) - ); - } - - // Strategy Signals - getStrategySignals(strategyId?: string): Observable { - const subject = this.messageSubjects.get('strategyOrchestrator'); - if (!subject) { - throw new Error('Strategy Orchestrator WebSocket not initialized'); - } - - return subject.asObservable().pipe( - filter(message => - message.type === 'strategy_signal' && - (!strategyId || message.data.strategyId === strategyId) - ), - map(message => message.data) - ); - } - - // Strategy Trades - getStrategyTrades(strategyId?: string): Observable { - const subject = this.messageSubjects.get('strategyOrchestrator'); - if (!subject) { - throw new Error('Strategy Orchestrator WebSocket not initialized'); - } - - return subject.asObservable().pipe( - filter(message => - message.type === 'strategy_trade' && - (!strategyId || message.data.strategyId === strategyId) - ), 
- map(message => message.data) - ); - } - - // All strategy-related messages, useful for components that need all types - getAllStrategyMessages(): Observable { - const subject = this.messageSubjects.get('strategyOrchestrator'); - if (!subject) { - throw new Error('Strategy Orchestrator WebSocket not initialized'); - } - - return subject.asObservable().pipe( - filter(message => - message.type.startsWith('strategy_') - ) - ); - } - - // Send messages - sendMessage(serviceName: string, message: any) { - const ws = this.connections.get(serviceName); - if (ws && ws.readyState === WebSocket.OPEN) { - ws.send(JSON.stringify(message)); - } else { - console.warn(`Cannot send message to ${serviceName}: WebSocket not connected`); - } - } - - // Cleanup - disconnect() { - this.connections.forEach((ws, serviceName) => { - if (ws.readyState === WebSocket.OPEN) { - ws.close(); - } - }); - this.connections.clear(); - this.messageSubjects.clear(); - } -} +import { Injectable, signal } from '@angular/core'; +import { BehaviorSubject, Observable, Subject } from 'rxjs'; +import { filter, map } from 'rxjs/operators'; + +export interface WebSocketMessage { + type: string; + data: any; + timestamp: string; +} + +export interface MarketDataUpdate { + symbol: string; + price: number; + change: number; + changePercent: number; + volume: number; + timestamp: string; +} + +export interface RiskAlert { + id: string; + symbol: string; + alertType: 'POSITION_LIMIT' | 'DAILY_LOSS' | 'VOLATILITY' | 'PORTFOLIO_RISK'; + message: string; + severity: 'LOW' | 'MEDIUM' | 'HIGH'; + timestamp: string; +} + +@Injectable({ + providedIn: 'root' +}) +export class WebSocketService { + private readonly WS_ENDPOINTS = { + marketData: 'ws://localhost:3001/ws', + riskGuardian: 'ws://localhost:3002/ws', + strategyOrchestrator: 'ws://localhost:3003/ws' + }; + + private connections = new Map(); + private messageSubjects = new Map>(); + + // Connection status signals + public isConnected = signal(false); + public 
connectionStatus = signal<{ [key: string]: boolean }>({ + marketData: false, + riskGuardian: false, + strategyOrchestrator: false + }); + + constructor() { + this.initializeConnections(); + } + + private initializeConnections() { + // Initialize WebSocket connections for all services + Object.entries(this.WS_ENDPOINTS).forEach(([service, url]) => { + this.connect(service, url); + }); + } + + private connect(serviceName: string, url: string) { + try { + const ws = new WebSocket(url); + const messageSubject = new Subject(); + + ws.onopen = () => { + console.log(`Connected to ${serviceName} WebSocket`); + this.updateConnectionStatus(serviceName, true); + }; + + ws.onmessage = (event) => { + try { + const message: WebSocketMessage = JSON.parse(event.data); + messageSubject.next(message); + } catch (error) { + console.error(`Failed to parse WebSocket message from ${serviceName}:`, error); + } + }; + + ws.onclose = () => { + console.log(`Disconnected from ${serviceName} WebSocket`); + this.updateConnectionStatus(serviceName, false); + + // Attempt to reconnect after 5 seconds + setTimeout(() => { + this.connect(serviceName, url); + }, 5000); + }; + + ws.onerror = (error) => { + console.error(`WebSocket error for ${serviceName}:`, error); + this.updateConnectionStatus(serviceName, false); + }; + + this.connections.set(serviceName, ws); + this.messageSubjects.set(serviceName, messageSubject); + + } catch (error) { + console.error(`Failed to connect to ${serviceName} WebSocket:`, error); + this.updateConnectionStatus(serviceName, false); + } + } + + private updateConnectionStatus(serviceName: string, isConnected: boolean) { + const currentStatus = this.connectionStatus(); + const newStatus = { ...currentStatus, [serviceName]: isConnected }; + this.connectionStatus.set(newStatus); + + // Update overall connection status + const overallConnected = Object.values(newStatus).some(status => status); + this.isConnected.set(overallConnected); + } + + // Market Data Updates + 
getMarketDataUpdates(): Observable { + const subject = this.messageSubjects.get('marketData'); + if (!subject) { + throw new Error('Market data WebSocket not initialized'); + } + + return subject.asObservable().pipe( + filter(message => message.type === 'market_data_update'), + map(message => message.data as MarketDataUpdate) + ); + } + + // Risk Alerts + getRiskAlerts(): Observable { + const subject = this.messageSubjects.get('riskGuardian'); + if (!subject) { + throw new Error('Risk Guardian WebSocket not initialized'); + } + + return subject.asObservable().pipe( + filter(message => message.type === 'risk_alert'), + map(message => message.data as RiskAlert) + ); + } + // Strategy Updates + getStrategyUpdates(): Observable { + const subject = this.messageSubjects.get('strategyOrchestrator'); + if (!subject) { + throw new Error('Strategy Orchestrator WebSocket not initialized'); + } + + return subject.asObservable().pipe( + filter(message => message.type === 'strategy_update'), + map(message => message.data) + ); + } + + // Strategy Signals + getStrategySignals(strategyId?: string): Observable { + const subject = this.messageSubjects.get('strategyOrchestrator'); + if (!subject) { + throw new Error('Strategy Orchestrator WebSocket not initialized'); + } + + return subject.asObservable().pipe( + filter(message => + message.type === 'strategy_signal' && + (!strategyId || message.data.strategyId === strategyId) + ), + map(message => message.data) + ); + } + + // Strategy Trades + getStrategyTrades(strategyId?: string): Observable { + const subject = this.messageSubjects.get('strategyOrchestrator'); + if (!subject) { + throw new Error('Strategy Orchestrator WebSocket not initialized'); + } + + return subject.asObservable().pipe( + filter(message => + message.type === 'strategy_trade' && + (!strategyId || message.data.strategyId === strategyId) + ), + map(message => message.data) + ); + } + + // All strategy-related messages, useful for components that need all types + 
getAllStrategyMessages(): Observable { + const subject = this.messageSubjects.get('strategyOrchestrator'); + if (!subject) { + throw new Error('Strategy Orchestrator WebSocket not initialized'); + } + + return subject.asObservable().pipe( + filter(message => + message.type.startsWith('strategy_') + ) + ); + } + + // Send messages + sendMessage(serviceName: string, message: any) { + const ws = this.connections.get(serviceName); + if (ws && ws.readyState === WebSocket.OPEN) { + ws.send(JSON.stringify(message)); + } else { + console.warn(`Cannot send message to ${serviceName}: WebSocket not connected`); + } + } + + // Cleanup + disconnect() { + this.connections.forEach((ws, serviceName) => { + if (ws.readyState === WebSocket.OPEN) { + ws.close(); + } + }); + this.connections.clear(); + this.messageSubjects.clear(); + } +} diff --git a/apps/dashboard/src/index.html b/apps/dashboard/src/index.html index 03d26a6..19a6b80 100644 --- a/apps/dashboard/src/index.html +++ b/apps/dashboard/src/index.html @@ -1,16 +1,16 @@ - - - - - Trading Dashboard - - - - - - - - - - - + + + + + Trading Dashboard + + + + + + + + + + + diff --git a/apps/dashboard/src/main.ts b/apps/dashboard/src/main.ts index 5df75f9..fd28efd 100644 --- a/apps/dashboard/src/main.ts +++ b/apps/dashboard/src/main.ts @@ -1,6 +1,6 @@ -import { bootstrapApplication } from '@angular/platform-browser'; -import { appConfig } from './app/app.config'; -import { App } from './app/app'; - -bootstrapApplication(App, appConfig) - .catch((err) => console.error(err)); +import { bootstrapApplication } from '@angular/platform-browser'; +import { appConfig } from './app/app.config'; +import { App } from './app/app'; + +bootstrapApplication(App, appConfig) + .catch((err) => console.error(err)); diff --git a/apps/dashboard/src/styles.css b/apps/dashboard/src/styles.css index 7170b06..d74fee5 100644 --- a/apps/dashboard/src/styles.css +++ b/apps/dashboard/src/styles.css @@ -1,89 +1,89 @@ -@import "tailwindcss"; - -/* Custom base 
styles */ -html, body { - height: 100%; - margin: 0; - font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif; - background-color: #f9fafb; -} - -/* Angular Material integration styles */ -.mat-sidenav-container { - background-color: transparent; -} - -.mat-sidenav { - border-radius: 0; -} - -.mat-toolbar { - background-color: white; - color: #374151; -} - -.mat-mdc-button.w-full { - width: 100%; - text-align: left; - justify-content: flex-start; -} - -.mat-mdc-card { - border-radius: 8px; - box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1); -} - -.mat-mdc-tab-group .mat-mdc-tab-header { - border-bottom: 1px solid #e5e7eb; -} - -.mat-mdc-chip.chip-green { - background-color: #dcfce7 !important; - color: #166534 !important; -} - -.mat-mdc-chip.chip-blue { - background-color: #dbeafe !important; - color: #1e40af !important; -} - -.mat-mdc-table { - border-radius: 8px; - overflow: hidden; -} - -.mat-mdc-header-row { - background-color: #f9fafb; -} - -.mat-mdc-row:hover { - background-color: #f9fafb; -} - -/* Dark mode overrides */ -.dark .mat-toolbar { - background-color: #1f2937; - color: #f9fafb; -} - -.dark .mat-mdc-tab-group .mat-mdc-tab-header { - border-bottom: 1px solid #4b5563; -} - -.dark .mat-mdc-header-row { - background-color: #1f2937; -} - -.dark .mat-mdc-row:hover { - background-color: #374151; -} - -.dark .mat-mdc-card { - background-color: #1f2937; - color: #f9fafb; -} - -.dark .mat-mdc-table { - background-color: #1f2937; - color: #f9fafb; -} +@import "tailwindcss"; + +/* Custom base styles */ +html, body { + height: 100%; + margin: 0; + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, sans-serif; + background-color: #f9fafb; +} + +/* Angular Material integration styles */ +.mat-sidenav-container { + background-color: transparent; +} + +.mat-sidenav { + border-radius: 0; +} + +.mat-toolbar { + background-color: white; + color: 
#374151; +} + +.mat-mdc-button.w-full { + width: 100%; + text-align: left; + justify-content: flex-start; +} + +.mat-mdc-card { + border-radius: 8px; + box-shadow: 0 1px 3px 0 rgb(0 0 0 / 0.1), 0 1px 2px -1px rgb(0 0 0 / 0.1); +} + +.mat-mdc-tab-group .mat-mdc-tab-header { + border-bottom: 1px solid #e5e7eb; +} + +.mat-mdc-chip.chip-green { + background-color: #dcfce7 !important; + color: #166534 !important; +} + +.mat-mdc-chip.chip-blue { + background-color: #dbeafe !important; + color: #1e40af !important; +} + +.mat-mdc-table { + border-radius: 8px; + overflow: hidden; +} + +.mat-mdc-header-row { + background-color: #f9fafb; +} + +.mat-mdc-row:hover { + background-color: #f9fafb; +} + +/* Dark mode overrides */ +.dark .mat-toolbar { + background-color: #1f2937; + color: #f9fafb; +} + +.dark .mat-mdc-tab-group .mat-mdc-tab-header { + border-bottom: 1px solid #4b5563; +} + +.dark .mat-mdc-header-row { + background-color: #1f2937; +} + +.dark .mat-mdc-row:hover { + background-color: #374151; +} + +.dark .mat-mdc-card { + background-color: #1f2937; + color: #f9fafb; +} + +.dark .mat-mdc-table { + background-color: #1f2937; + color: #f9fafb; +} diff --git a/apps/dashboard/tailwind.config.js b/apps/dashboard/tailwind.config.js index 00bc3e9..c01e514 100644 --- a/apps/dashboard/tailwind.config.js +++ b/apps/dashboard/tailwind.config.js @@ -1,52 +1,52 @@ -/** @type {import('tailwindcss').Config} */ -module.exports = { - content: [ - "./src/**/*.{html,ts}", - ], - theme: { - extend: { - colors: { - primary: { - 50: '#eff6ff', - 100: '#dbeafe', - 200: '#bfdbfe', - 300: '#93c5fd', - 400: '#60a5fa', - 500: '#3b82f6', - 600: '#2563eb', - 700: '#1d4ed8', - 800: '#1e40af', - 900: '#1e3a8a', - 950: '#172554', - }, - success: { - 50: '#f0fdf4', - 100: '#dcfce7', - 200: '#bbf7d0', - 300: '#86efac', - 400: '#4ade80', - 500: '#22c55e', - 600: '#16a34a', - 700: '#15803d', - 800: '#166534', - 900: '#14532d', - 950: '#052e16', - }, - danger: { - 50: '#fef2f2', - 100: '#fee2e2', - 200: 
'#fecaca', - 300: '#fca5a5', - 400: '#f87171', - 500: '#ef4444', - 600: '#dc2626', - 700: '#b91c1c', - 800: '#991b1b', - 900: '#7f1d1d', - 950: '#450a0a', - }, - }, - }, - }, - plugins: [], -} +/** @type {import('tailwindcss').Config} */ +module.exports = { + content: [ + "./src/**/*.{html,ts}", + ], + theme: { + extend: { + colors: { + primary: { + 50: '#eff6ff', + 100: '#dbeafe', + 200: '#bfdbfe', + 300: '#93c5fd', + 400: '#60a5fa', + 500: '#3b82f6', + 600: '#2563eb', + 700: '#1d4ed8', + 800: '#1e40af', + 900: '#1e3a8a', + 950: '#172554', + }, + success: { + 50: '#f0fdf4', + 100: '#dcfce7', + 200: '#bbf7d0', + 300: '#86efac', + 400: '#4ade80', + 500: '#22c55e', + 600: '#16a34a', + 700: '#15803d', + 800: '#166534', + 900: '#14532d', + 950: '#052e16', + }, + danger: { + 50: '#fef2f2', + 100: '#fee2e2', + 200: '#fecaca', + 300: '#fca5a5', + 400: '#f87171', + 500: '#ef4444', + 600: '#dc2626', + 700: '#b91c1c', + 800: '#991b1b', + 900: '#7f1d1d', + 950: '#450a0a', + }, + }, + }, + }, + plugins: [], +} diff --git a/apps/dashboard/tsconfig.app.json b/apps/dashboard/tsconfig.app.json index 264f459..254a59d 100644 --- a/apps/dashboard/tsconfig.app.json +++ b/apps/dashboard/tsconfig.app.json @@ -1,15 +1,15 @@ -/* To learn more about Typescript configuration file: https://www.typescriptlang.org/docs/handbook/tsconfig-json.html. */ -/* To learn more about Angular compiler options: https://angular.dev/reference/configs/angular-compiler-options. */ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "./out-tsc/app", - "types": [] - }, - "include": [ - "src/**/*.ts" - ], - "exclude": [ - "src/**/*.spec.ts" - ] -} +/* To learn more about Typescript configuration file: https://www.typescriptlang.org/docs/handbook/tsconfig-json.html. */ +/* To learn more about Angular compiler options: https://angular.dev/reference/configs/angular-compiler-options. 
*/ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "./out-tsc/app", + "types": [] + }, + "include": [ + "src/**/*.ts" + ], + "exclude": [ + "src/**/*.spec.ts" + ] +} diff --git a/apps/dashboard/tsconfig.json b/apps/dashboard/tsconfig.json index fc4b18f..a8239ce 100644 --- a/apps/dashboard/tsconfig.json +++ b/apps/dashboard/tsconfig.json @@ -1,32 +1,32 @@ -/* To learn more about Typescript configuration file: https://www.typescriptlang.org/docs/handbook/tsconfig-json.html. */ -/* To learn more about Angular compiler options: https://angular.dev/reference/configs/angular-compiler-options. */ -{ - "extends": "../../tsconfig.json", - "compileOnSave": false, - "compilerOptions": { - "noImplicitOverride": true, - "noPropertyAccessFromIndexSignature": true, - "noImplicitReturns": true, - "noFallthroughCasesInSwitch": true, - "isolatedModules": true, - "experimentalDecorators": true, - "importHelpers": true, - "module": "preserve" - }, - "angularCompilerOptions": { - "enableI18nLegacyMessageIdFormat": false, - "strictInjectionParameters": true, - "strictInputAccessModifiers": true, - "typeCheckHostBindings": true, - "strictTemplates": true - }, - "files": [], - "references": [ - { - "path": "./tsconfig.app.json" - }, - { - "path": "./tsconfig.spec.json" - } - ] -} +/* To learn more about Typescript configuration file: https://www.typescriptlang.org/docs/handbook/tsconfig-json.html. */ +/* To learn more about Angular compiler options: https://angular.dev/reference/configs/angular-compiler-options. 
*/ +{ + "extends": "../../tsconfig.json", + "compileOnSave": false, + "compilerOptions": { + "noImplicitOverride": true, + "noPropertyAccessFromIndexSignature": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true, + "isolatedModules": true, + "experimentalDecorators": true, + "importHelpers": true, + "module": "preserve" + }, + "angularCompilerOptions": { + "enableI18nLegacyMessageIdFormat": false, + "strictInjectionParameters": true, + "strictInputAccessModifiers": true, + "typeCheckHostBindings": true, + "strictTemplates": true + }, + "files": [], + "references": [ + { + "path": "./tsconfig.app.json" + }, + { + "path": "./tsconfig.spec.json" + } + ] +} diff --git a/apps/dashboard/tsconfig.spec.json b/apps/dashboard/tsconfig.spec.json index 04df34c..f936da6 100644 --- a/apps/dashboard/tsconfig.spec.json +++ b/apps/dashboard/tsconfig.spec.json @@ -1,14 +1,14 @@ -/* To learn more about Typescript configuration file: https://www.typescriptlang.org/docs/handbook/tsconfig-json.html. */ -/* To learn more about Angular compiler options: https://angular.dev/reference/configs/angular-compiler-options. */ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "./out-tsc/spec", - "types": [ - "jasmine" - ] - }, - "include": [ - "src/**/*.ts" - ] -} +/* To learn more about Typescript configuration file: https://www.typescriptlang.org/docs/handbook/tsconfig-json.html. */ +/* To learn more about Angular compiler options: https://angular.dev/reference/configs/angular-compiler-options. 
*/ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "./out-tsc/spec", + "types": [ + "jasmine" + ] + }, + "include": [ + "src/**/*.ts" + ] +} diff --git a/apps/data-service/src/index.ts b/apps/data-service/src/index.ts index 9527ba9..b43f22e 100644 --- a/apps/data-service/src/index.ts +++ b/apps/data-service/src/index.ts @@ -1,258 +1,258 @@ -/** - * Data Service - Combined live and historical data ingestion with queue-based architecture - */ -import { getLogger } from '@stock-bot/logger'; -import { loadEnvVariables } from '@stock-bot/config'; -import { Hono } from 'hono'; -import { serve } from '@hono/node-server'; -import { queueManager } from './services/queue.service'; - -// Load environment variables -loadEnvVariables(); - -const app = new Hono(); -const logger = getLogger('data-service'); - -const PORT = parseInt(process.env.DATA_SERVICE_PORT || '3002'); -// Health check endpoint -app.get('/health', (c) => { - return c.json({ - service: 'data-service', - status: 'healthy', - timestamp: new Date().toISOString(), - queue: { - status: 'running', - workers: queueManager.getWorkerCount() - } - }); -}); - -// Queue management endpoints -app.get('/api/queue/status', async (c) => { - try { - const status = await queueManager.getQueueStatus(); - return c.json({ status: 'success', data: status }); - } catch (error) { - logger.error('Failed to get queue status', { error }); - return c.json({ status: 'error', message: 'Failed to get queue status' }, 500); - } -}); - -app.post('/api/queue/job', async (c) => { - try { - const jobData = await c.req.json(); - const job = await queueManager.addJob(jobData); - return c.json({ status: 'success', jobId: job.id }); - } catch (error) { - logger.error('Failed to add job', { error }); - return c.json({ status: 'error', message: 'Failed to add job' }, 500); - } -}); - -// Market data endpoints -app.get('/api/live/:symbol', async (c) => { - const symbol = c.req.param('symbol'); - logger.info('Live data request', { 
symbol }); - - try { // Queue job for live data using Yahoo provider - const job = await queueManager.addJob({ - type: 'market-data-live', - service: 'market-data', - provider: 'yahoo-finance', - operation: 'live-data', - payload: { symbol } - }); - return c.json({ - status: 'success', - message: 'Live data job queued', - jobId: job.id, - symbol - }); - } catch (error) { - logger.error('Failed to queue live data job', { symbol, error }); - return c.json({ status: 'error', message: 'Failed to queue live data job' }, 500); - } -}); - -app.get('/api/historical/:symbol', async (c) => { - const symbol = c.req.param('symbol'); - const from = c.req.query('from'); - const to = c.req.query('to'); - - logger.info('Historical data request', { symbol, from, to }); - - try { - const fromDate = from ? new Date(from) : new Date(Date.now() - 30 * 24 * 60 * 60 * 1000); // 30 days ago - const toDate = to ? new Date(to) : new Date(); // Now - // Queue job for historical data using Yahoo provider - const job = await queueManager.addJob({ - type: 'market-data-historical', - service: 'market-data', - provider: 'yahoo-finance', - operation: 'historical-data', - payload: { - symbol, - from: fromDate.toISOString(), - to: toDate.toISOString() - } - }); return c.json({ - status: 'success', - message: 'Historical data job queued', - jobId: job.id, - symbol, - from: fromDate, - to: toDate - }); - } catch (error) { - logger.error('Failed to queue historical data job', { symbol, from, to, error }); - return c.json({ status: 'error', message: 'Failed to queue historical data job' }, 500); } -}); - -// Proxy management endpoints -app.post('/api/proxy/fetch', async (c) => { - try { - const job = await queueManager.addJob({ - type: 'proxy-fetch', - service: 'proxy', - provider: 'proxy-service', - operation: 'fetch-and-check', - payload: {}, - priority: 5 - }); - - return c.json({ - status: 'success', - jobId: job.id, - message: 'Proxy fetch job queued' - }); - } catch (error) { - 
logger.error('Failed to queue proxy fetch', { error }); - return c.json({ status: 'error', message: 'Failed to queue proxy fetch' }, 500); - } -}); - -app.post('/api/proxy/check', async (c) => { - try { - const { proxies } = await c.req.json(); - const job = await queueManager.addJob({ - type: 'proxy-check', - service: 'proxy', - provider: 'proxy-service', - operation: 'check-specific', - payload: { proxies }, - priority: 8 - }); - - return c.json({ - status: 'success', - jobId: job.id, - message: `Proxy check job queued for ${proxies.length} proxies` - }); - } catch (error) { - logger.error('Failed to queue proxy check', { error }); - return c.json({ status: 'error', message: 'Failed to queue proxy check' }, 500); - } -}); - -// Get proxy stats via queue -app.get('/api/proxy/stats', async (c) => { - try { - const job = await queueManager.addJob({ - type: 'proxy-stats', - service: 'proxy', - provider: 'proxy-service', - operation: 'get-stats', - payload: {}, - priority: 3 - }); - - return c.json({ - status: 'success', - jobId: job.id, - message: 'Proxy stats job queued' - }); - } catch (error) { - logger.error('Failed to queue proxy stats', { error }); - return c.json({ status: 'error', message: 'Failed to queue proxy stats' }, 500); - } -}); - -// Provider registry endpoints -app.get('/api/providers', async (c) => { - try { - const providers = queueManager.getRegisteredProviders(); - return c.json({ status: 'success', providers }); - } catch (error) { - logger.error('Failed to get providers', { error }); - return c.json({ status: 'error', message: 'Failed to get providers' }, 500); - } -}); - -// Add new endpoint to see scheduled jobs -app.get('/api/scheduled-jobs', async (c) => { - try { - const jobs = queueManager.getScheduledJobsInfo(); - return c.json({ - status: 'success', - count: jobs.length, - jobs - }); - } catch (error) { - logger.error('Failed to get scheduled jobs info', { error }); - return c.json({ status: 'error', message: 'Failed to get scheduled 
jobs' }, 500); - } -}); - -// Initialize services -async function initializeServices() { - logger.info('Initializing data service...'); - - try { - // Initialize queue service - await queueManager.initialize(); - logger.info('Queue service initialized'); - logger.info('All services initialized successfully'); - } catch (error) { - logger.error('Failed to initialize services', { error }); - throw error; - } -} - -// Start server -async function startServer() { - await initializeServices(); - - serve({ - fetch: app.fetch, - port: PORT, - }); - - logger.info(`Data Service started on port ${PORT}`); - logger.info('Available endpoints:'); - logger.info(' GET /health - Health check'); - logger.info(' GET /api/queue/status - Queue status'); - logger.info(' POST /api/queue/job - Add job to queue'); - logger.info(' GET /api/live/:symbol - Live market data'); - logger.info(' GET /api/historical/:symbol - Historical market data'); - logger.info(' POST /api/proxy/fetch - Queue proxy fetch'); - logger.info(' POST /api/proxy/check - Queue proxy check'); - logger.info(' GET /api/providers - List registered providers'); -} - -// Graceful shutdown -process.on('SIGINT', async () => { - logger.info('Received SIGINT, shutting down gracefully...'); - await queueManager.shutdown(); - process.exit(0); -}); - -process.on('SIGTERM', async () => { - logger.info('Received SIGTERM, shutting down gracefully...'); - await queueManager.shutdown(); - process.exit(0); -}); - -startServer().catch(error => { - logger.error('Failed to start server', { error }); - process.exit(1); -}); +/** + * Data Service - Combined live and historical data ingestion with queue-based architecture + */ +import { getLogger } from '@stock-bot/logger'; +import { loadEnvVariables } from '@stock-bot/config'; +import { Hono } from 'hono'; +import { serve } from '@hono/node-server'; +import { queueManager } from './services/queue.service'; + +// Load environment variables +loadEnvVariables(); + +const app = new Hono(); 
+const logger = getLogger('data-service'); + +const PORT = parseInt(process.env.DATA_SERVICE_PORT || '3002'); +// Health check endpoint +app.get('/health', (c) => { + return c.json({ + service: 'data-service', + status: 'healthy', + timestamp: new Date().toISOString(), + queue: { + status: 'running', + workers: queueManager.getWorkerCount() + } + }); +}); + +// Queue management endpoints +app.get('/api/queue/status', async (c) => { + try { + const status = await queueManager.getQueueStatus(); + return c.json({ status: 'success', data: status }); + } catch (error) { + logger.error('Failed to get queue status', { error }); + return c.json({ status: 'error', message: 'Failed to get queue status' }, 500); + } +}); + +app.post('/api/queue/job', async (c) => { + try { + const jobData = await c.req.json(); + const job = await queueManager.addJob(jobData); + return c.json({ status: 'success', jobId: job.id }); + } catch (error) { + logger.error('Failed to add job', { error }); + return c.json({ status: 'error', message: 'Failed to add job' }, 500); + } +}); + +// Market data endpoints +app.get('/api/live/:symbol', async (c) => { + const symbol = c.req.param('symbol'); + logger.info('Live data request', { symbol }); + + try { // Queue job for live data using Yahoo provider + const job = await queueManager.addJob({ + type: 'market-data-live', + service: 'market-data', + provider: 'yahoo-finance', + operation: 'live-data', + payload: { symbol } + }); + return c.json({ + status: 'success', + message: 'Live data job queued', + jobId: job.id, + symbol + }); + } catch (error) { + logger.error('Failed to queue live data job', { symbol, error }); + return c.json({ status: 'error', message: 'Failed to queue live data job' }, 500); + } +}); + +app.get('/api/historical/:symbol', async (c) => { + const symbol = c.req.param('symbol'); + const from = c.req.query('from'); + const to = c.req.query('to'); + + logger.info('Historical data request', { symbol, from, to }); + + try { + const 
fromDate = from ? new Date(from) : new Date(Date.now() - 30 * 24 * 60 * 60 * 1000); // 30 days ago + const toDate = to ? new Date(to) : new Date(); // Now + // Queue job for historical data using Yahoo provider + const job = await queueManager.addJob({ + type: 'market-data-historical', + service: 'market-data', + provider: 'yahoo-finance', + operation: 'historical-data', + payload: { + symbol, + from: fromDate.toISOString(), + to: toDate.toISOString() + } + }); return c.json({ + status: 'success', + message: 'Historical data job queued', + jobId: job.id, + symbol, + from: fromDate, + to: toDate + }); + } catch (error) { + logger.error('Failed to queue historical data job', { symbol, from, to, error }); + return c.json({ status: 'error', message: 'Failed to queue historical data job' }, 500); } +}); + +// Proxy management endpoints +app.post('/api/proxy/fetch', async (c) => { + try { + const job = await queueManager.addJob({ + type: 'proxy-fetch', + service: 'proxy', + provider: 'proxy-service', + operation: 'fetch-and-check', + payload: {}, + priority: 5 + }); + + return c.json({ + status: 'success', + jobId: job.id, + message: 'Proxy fetch job queued' + }); + } catch (error) { + logger.error('Failed to queue proxy fetch', { error }); + return c.json({ status: 'error', message: 'Failed to queue proxy fetch' }, 500); + } +}); + +app.post('/api/proxy/check', async (c) => { + try { + const { proxies } = await c.req.json(); + const job = await queueManager.addJob({ + type: 'proxy-check', + service: 'proxy', + provider: 'proxy-service', + operation: 'check-specific', + payload: { proxies }, + priority: 8 + }); + + return c.json({ + status: 'success', + jobId: job.id, + message: `Proxy check job queued for ${proxies.length} proxies` + }); + } catch (error) { + logger.error('Failed to queue proxy check', { error }); + return c.json({ status: 'error', message: 'Failed to queue proxy check' }, 500); + } +}); + +// Get proxy stats via queue +app.get('/api/proxy/stats', async 
(c) => { + try { + const job = await queueManager.addJob({ + type: 'proxy-stats', + service: 'proxy', + provider: 'proxy-service', + operation: 'get-stats', + payload: {}, + priority: 3 + }); + + return c.json({ + status: 'success', + jobId: job.id, + message: 'Proxy stats job queued' + }); + } catch (error) { + logger.error('Failed to queue proxy stats', { error }); + return c.json({ status: 'error', message: 'Failed to queue proxy stats' }, 500); + } +}); + +// Provider registry endpoints +app.get('/api/providers', async (c) => { + try { + const providers = queueManager.getRegisteredProviders(); + return c.json({ status: 'success', providers }); + } catch (error) { + logger.error('Failed to get providers', { error }); + return c.json({ status: 'error', message: 'Failed to get providers' }, 500); + } +}); + +// Add new endpoint to see scheduled jobs +app.get('/api/scheduled-jobs', async (c) => { + try { + const jobs = queueManager.getScheduledJobsInfo(); + return c.json({ + status: 'success', + count: jobs.length, + jobs + }); + } catch (error) { + logger.error('Failed to get scheduled jobs info', { error }); + return c.json({ status: 'error', message: 'Failed to get scheduled jobs' }, 500); + } +}); + +// Initialize services +async function initializeServices() { + logger.info('Initializing data service...'); + + try { + // Initialize queue service + await queueManager.initialize(); + logger.info('Queue service initialized'); + logger.info('All services initialized successfully'); + } catch (error) { + logger.error('Failed to initialize services', { error }); + throw error; + } +} + +// Start server +async function startServer() { + await initializeServices(); + + serve({ + fetch: app.fetch, + port: PORT, + }); + + logger.info(`Data Service started on port ${PORT}`); + logger.info('Available endpoints:'); + logger.info(' GET /health - Health check'); + logger.info(' GET /api/queue/status - Queue status'); + logger.info(' POST /api/queue/job - Add job to queue'); 
+ logger.info(' GET /api/live/:symbol - Live market data'); + logger.info(' GET /api/historical/:symbol - Historical market data'); + logger.info(' POST /api/proxy/fetch - Queue proxy fetch'); + logger.info(' POST /api/proxy/check - Queue proxy check'); + logger.info(' GET /api/providers - List registered providers'); +} + +// Graceful shutdown +process.on('SIGINT', async () => { + logger.info('Received SIGINT, shutting down gracefully...'); + await queueManager.shutdown(); + process.exit(0); +}); + +process.on('SIGTERM', async () => { + logger.info('Received SIGTERM, shutting down gracefully...'); + await queueManager.shutdown(); + process.exit(0); +}); + +startServer().catch(error => { + logger.error('Failed to start server', { error }); + process.exit(1); +}); diff --git a/apps/data-service/src/providers/proxy.provider.ts b/apps/data-service/src/providers/proxy.provider.ts index bde3fd2..7e2111d 100644 --- a/apps/data-service/src/providers/proxy.provider.ts +++ b/apps/data-service/src/providers/proxy.provider.ts @@ -1,140 +1,140 @@ -import { ProxyInfo } from 'libs/http/src/types'; -import { ProviderConfig } from '../services/provider-registry.service'; -import { getLogger } from '@stock-bot/logger'; -import { BatchProcessor } from '../utils/batch-processor'; - -// Create logger for this provider -const logger = getLogger('proxy-provider'); - -// This will run at the same time each day as when the app started -const getEvery24HourCron = (): string => { - const now = new Date(); - const hours = now.getHours(); - const minutes = now.getMinutes(); - return `${minutes} ${hours} * * *`; // Every day at startup time -}; - -export const proxyProvider: ProviderConfig = { - name: 'proxy-service', - service: 'proxy', - operations: { - 'fetch-and-check': async (payload: { sources?: string[] }) => { - const { proxyService } = await import('./proxy.tasks'); - const { queueManager } = await import('../services/queue.service'); - - const proxies = await 
proxyService.fetchProxiesFromSources(); - - if (proxies.length === 0) { - return { proxiesFetched: 0, jobsCreated: 0 }; - } - - const batchProcessor = new BatchProcessor(queueManager); - - // Simplified configuration - const result = await batchProcessor.processItems({ - items: proxies, - batchSize: parseInt(process.env.PROXY_BATCH_SIZE || '200'), - totalDelayMs: parseInt(process.env.PROXY_VALIDATION_HOURS || '4') * 60 * 60 * 1000 , - jobNamePrefix: 'proxy', - operation: 'check-proxy', - service: 'proxy', - provider: 'proxy-service', - priority: 2, - useBatching: process.env.PROXY_DIRECT_MODE !== 'true', // Simple boolean flag - createJobData: (proxy: ProxyInfo) => ({ - proxy, - source: 'fetch-and-check' - }), - removeOnComplete: 5, - removeOnFail: 3 - }); - - return { - proxiesFetched: result.totalItems, - ...result - }; - }, - - 'process-proxy-batch': async (payload: any) => { - // Process a batch of proxies - uses the fetch-and-check JobNamePrefix process-(proxy)-batch - const { queueManager } = await import('../services/queue.service'); - const batchProcessor = new BatchProcessor(queueManager); - return await batchProcessor.processBatch( - payload, - (proxy: ProxyInfo) => ({ - proxy, - source: payload.config?.source || 'batch-processing' - }) - ); - }, - - 'check-proxy': async (payload: { - proxy: ProxyInfo, - source?: string, - batchIndex?: number, - itemIndex?: number, - total?: number - }) => { - const { checkProxy } = await import('./proxy.tasks'); - - try { - const result = await checkProxy(payload.proxy); - - logger.debug('Proxy validated', { - proxy: `${payload.proxy.host}:${payload.proxy.port}`, - isWorking: result.isWorking, - responseTime: result.responseTime, - batchIndex: payload.batchIndex - }); - - return { - result, - proxy: payload.proxy, - // Only include batch info if it exists (for batch mode) - ...(payload.batchIndex !== undefined && { - batchInfo: { - batchIndex: payload.batchIndex, - itemIndex: payload.itemIndex, - total: payload.total, - 
source: payload.source - } - }) - }; - } catch (error) { - logger.warn('Proxy validation failed', { - proxy: `${payload.proxy.host}:${payload.proxy.port}`, - error: error instanceof Error ? error.message : String(error), - batchIndex: payload.batchIndex - }); - - return { - result: { isWorking: false, error: String(error) }, - proxy: payload.proxy, - // Only include batch info if it exists (for batch mode) - ...(payload.batchIndex !== undefined && { - batchInfo: { - batchIndex: payload.batchIndex, - itemIndex: payload.itemIndex, - total: payload.total, - source: payload.source - } - }) - }; - } - } - }, - scheduledJobs: [ - { - type: 'proxy-maintenance', - operation: 'fetch-and-check', - payload: {}, - // should remove and just run at the same time so app restarts dont keeping adding same jobs - cronPattern: getEvery24HourCron(), - priority: 5, - immediately: true, - description: 'Fetch and validate proxy list from sources' - } - ] -}; - - +import { ProxyInfo } from 'libs/http/src/types'; +import { ProviderConfig } from '../services/provider-registry.service'; +import { getLogger } from '@stock-bot/logger'; +import { BatchProcessor } from '../utils/batch-processor'; + +// Create logger for this provider +const logger = getLogger('proxy-provider'); + +// This will run at the same time each day as when the app started +const getEvery24HourCron = (): string => { + const now = new Date(); + const hours = now.getHours(); + const minutes = now.getMinutes(); + return `${minutes} ${hours} * * *`; // Every day at startup time +}; + +export const proxyProvider: ProviderConfig = { + name: 'proxy-service', + service: 'proxy', + operations: { + 'fetch-and-check': async (payload: { sources?: string[] }) => { + const { proxyService } = await import('./proxy.tasks'); + const { queueManager } = await import('../services/queue.service'); + + const proxies = await proxyService.fetchProxiesFromSources(); + + if (proxies.length === 0) { + return { proxiesFetched: 0, jobsCreated: 0 }; + 
} + + const batchProcessor = new BatchProcessor(queueManager); + + // Simplified configuration + const result = await batchProcessor.processItems({ + items: proxies, + batchSize: parseInt(process.env.PROXY_BATCH_SIZE || '200'), + totalDelayMs: parseInt(process.env.PROXY_VALIDATION_HOURS || '4') * 60 * 60 * 1000 , + jobNamePrefix: 'proxy', + operation: 'check-proxy', + service: 'proxy', + provider: 'proxy-service', + priority: 2, + useBatching: process.env.PROXY_DIRECT_MODE !== 'true', // Simple boolean flag + createJobData: (proxy: ProxyInfo) => ({ + proxy, + source: 'fetch-and-check' + }), + removeOnComplete: 5, + removeOnFail: 3 + }); + + return { + proxiesFetched: result.totalItems, + ...result + }; + }, + + 'process-proxy-batch': async (payload: any) => { + // Process a batch of proxies - uses the fetch-and-check JobNamePrefix process-(proxy)-batch + const { queueManager } = await import('../services/queue.service'); + const batchProcessor = new BatchProcessor(queueManager); + return await batchProcessor.processBatch( + payload, + (proxy: ProxyInfo) => ({ + proxy, + source: payload.config?.source || 'batch-processing' + }) + ); + }, + + 'check-proxy': async (payload: { + proxy: ProxyInfo, + source?: string, + batchIndex?: number, + itemIndex?: number, + total?: number + }) => { + const { checkProxy } = await import('./proxy.tasks'); + + try { + const result = await checkProxy(payload.proxy); + + logger.debug('Proxy validated', { + proxy: `${payload.proxy.host}:${payload.proxy.port}`, + isWorking: result.isWorking, + responseTime: result.responseTime, + batchIndex: payload.batchIndex + }); + + return { + result, + proxy: payload.proxy, + // Only include batch info if it exists (for batch mode) + ...(payload.batchIndex !== undefined && { + batchInfo: { + batchIndex: payload.batchIndex, + itemIndex: payload.itemIndex, + total: payload.total, + source: payload.source + } + }) + }; + } catch (error) { + logger.warn('Proxy validation failed', { + proxy: 
`${payload.proxy.host}:${payload.proxy.port}`, + error: error instanceof Error ? error.message : String(error), + batchIndex: payload.batchIndex + }); + + return { + result: { isWorking: false, error: String(error) }, + proxy: payload.proxy, + // Only include batch info if it exists (for batch mode) + ...(payload.batchIndex !== undefined && { + batchInfo: { + batchIndex: payload.batchIndex, + itemIndex: payload.itemIndex, + total: payload.total, + source: payload.source + } + }) + }; + } + } + }, + scheduledJobs: [ + { + type: 'proxy-maintenance', + operation: 'fetch-and-check', + payload: {}, + // should remove and just run at the same time so app restarts dont keeping adding same jobs + cronPattern: getEvery24HourCron(), + priority: 5, + immediately: true, + description: 'Fetch and validate proxy list from sources' + } + ] +}; + + diff --git a/apps/data-service/src/providers/proxy.tasks.ts b/apps/data-service/src/providers/proxy.tasks.ts index b9ebfd4..066d7c8 100644 --- a/apps/data-service/src/providers/proxy.tasks.ts +++ b/apps/data-service/src/providers/proxy.tasks.ts @@ -1,264 +1,264 @@ -import { getLogger } from '@stock-bot/logger'; -import createCache, { type CacheProvider } from '@stock-bot/cache'; -import { HttpClient, ProxyInfo } from '@stock-bot/http'; -import pLimit from 'p-limit'; - -// Shared configuration and utilities -const PROXY_CONFIG = { - CACHE_KEY: 'proxy', - CACHE_TTL: 86400, // 24 hours - CHECK_TIMEOUT: 7000, - CHECK_IP: '99.246.102.205', - CHECK_URL: 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955', - CONCURRENCY_LIMIT: 100, - PROXY_SOURCES: [ - {url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt',protocol: 'http', }, - {url: 
'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt', protocol: 'http' }, - {url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/BreakingTechFr/Proxy_Free/refs/heads/main/proxies/http.txt', protocol: 'http' }, - {url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',protocol: 'http', }, - {url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',protocol: 'http', }, - - // {url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',protocol: 'https', }, - // {url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',protocol: 'https', }, - // {url: 
'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt', protocol: 'https' }, - // {url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt',protocol: 'https', }, - // {url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt',protocol: 'https', }, - // {url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',protocol: 'https', }, - // {url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',protocol: 'https', }, - ] -}; - -// Shared instances (module-scoped, not global) -let logger: ReturnType; -let cache: CacheProvider; -let httpClient: HttpClient; -let concurrencyLimit: ReturnType; - -// Initialize shared resources -function initializeSharedResources() { - if (!logger) { - logger = getLogger('proxy-tasks'); - cache = createCache('hybrid'); - httpClient = new HttpClient({ timeout: 10000 }, logger); - concurrencyLimit = pLimit(PROXY_CONFIG.CONCURRENCY_LIMIT); - logger.info('Proxy tasks initialized'); - } -} - -// Individual task functions -export async function queueProxyFetch(): Promise { - initializeSharedResources(); - - const { queueManager } = await import('../services/queue.service'); - const job = await queueManager.addJob({ - type: 'proxy-fetch', - service: 'proxy', - provider: 'proxy-service', - operation: 'fetch-and-check', - payload: {}, - priority: 5 - }); - - const jobId = job.id || 'unknown'; - logger.info('Proxy fetch job queued', { jobId }); - return jobId; -} - -export async function queueProxyCheck(proxies: ProxyInfo[]): Promise { - initializeSharedResources(); - - const { queueManager } = await import('../services/queue.service'); - const job = await queueManager.addJob({ - type: 'proxy-check', - service: 'proxy', - provider: 'proxy-service', - operation: 'check-specific', - payload: { proxies }, - priority: 3 - }); - - const jobId = job.id || 'unknown'; - 
logger.info('Proxy check job queued', { jobId, count: proxies.length }); - return jobId; -} - -export async function fetchProxiesFromSources(): Promise { - initializeSharedResources(); - - const sources = PROXY_CONFIG.PROXY_SOURCES.map(source => - concurrencyLimit(() => fetchProxiesFromSource(source)) - ); - const result = await Promise.all(sources); - let allProxies: ProxyInfo[] = result.flat(); - allProxies = removeDuplicateProxies(allProxies); - // await checkProxies(allProxies); - return allProxies; -} - -export async function fetchProxiesFromSource(source: { url: string; protocol: string }): Promise { - initializeSharedResources(); - - const allProxies: ProxyInfo[] = []; - - try { - logger.info(`Fetching proxies from ${source.url}`); - - const response = await httpClient.get(source.url, { - timeout: 10000 - }); - - if (response.status !== 200) { - logger.warn(`Failed to fetch from ${source.url}: ${response.status}`); - return []; - } - - const text = response.data; - const lines = text.split('\n').filter((line: string) => line.trim()); - - for (const line of lines) { - let trimmed = line.trim(); - trimmed = cleanProxyUrl(trimmed); - if (!trimmed || trimmed.startsWith('#')) continue; - - // Parse formats like "host:port" or "host:port:user:pass" - const parts = trimmed.split(':'); - if (parts.length >= 2) { - const proxy: ProxyInfo = { - protocol: source.protocol as 'http' | 'https' | 'socks4' | 'socks5', - host: parts[0], - port: parseInt(parts[1]) - }; - - if (!isNaN(proxy.port) && proxy.host) { - allProxies.push(proxy); - } - } - } - - logger.info(`Parsed ${allProxies.length} proxies from ${source.url}`); - - } catch (error) { - logger.error(`Error fetching proxies from ${source.url}`, error); - return []; - } - - return allProxies; -} - -/** - * Check if a proxy is working - */ -export async function checkProxy(proxy: ProxyInfo): Promise { - initializeSharedResources(); - - let success = false; - logger.debug(`Checking Proxy:`, { - protocol: proxy.protocol, 
- host: proxy.host, - port: proxy.port, - }); - - try { - // Test the proxy - const response = await httpClient.get(PROXY_CONFIG.CHECK_URL, { - proxy, - timeout: PROXY_CONFIG.CHECK_TIMEOUT - }); - - const isWorking = response.status >= 200 && response.status < 300; - - const result: ProxyInfo = { - ...proxy, - isWorking, - checkedAt: new Date(), - responseTime: response.responseTime, - }; - - if (isWorking && !JSON.stringify(response.data).includes(PROXY_CONFIG.CHECK_IP)) { - success = true; - await cache.set(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`, result, PROXY_CONFIG.CACHE_TTL); - } else { - await cache.del(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`); - } - - logger.debug('Proxy check completed', { - host: proxy.host, - port: proxy.port, - isWorking, - }); - - return result; - - } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); - - const result: ProxyInfo = { - ...proxy, - isWorking: false, - error: errorMessage, - checkedAt: new Date() - }; - - // If the proxy check failed, remove it from cache - success is here cause i think abort signal fails sometimes - if (!success) { - await cache.del(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`); - } - - logger.debug('Proxy check failed', { - host: proxy.host, - port: proxy.port, - error: errorMessage - }); - - return result; - } -} - -// Utility functions -function cleanProxyUrl(url: string): string { - return url - .replace(/^https?:\/\//, '') - .replace(/^0+/, '') - .replace(/:0+(\d)/g, ':$1'); -} - -function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] { - const seen = new Set(); - const unique: ProxyInfo[] = []; - - for (const proxy of proxies) { - const key = `${proxy.protocol}://${proxy.host}:${proxy.port}`; - if (!seen.has(key)) { - seen.add(key); - unique.push(proxy); - } - } - - return unique; -} - -// Optional: Export a convenience object that groups 
related tasks -export const proxyTasks = { - queueProxyFetch, - queueProxyCheck, - fetchProxiesFromSources, - fetchProxiesFromSource, - checkProxy, -}; - -// Export singleton instance for backward compatibility (optional) -// Remove this if you want to fully move to the task-based approach +import { getLogger } from '@stock-bot/logger'; +import createCache, { type CacheProvider } from '@stock-bot/cache'; +import { HttpClient, ProxyInfo } from '@stock-bot/http'; +import pLimit from 'p-limit'; + +// Shared configuration and utilities +const PROXY_CONFIG = { + CACHE_KEY: 'proxy', + CACHE_TTL: 86400, // 24 hours + CHECK_TIMEOUT: 7000, + CHECK_IP: '99.246.102.205', + CHECK_URL: 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955', + CONCURRENCY_LIMIT: 100, + PROXY_SOURCES: [ + {url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt', protocol: 'http' }, + {url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt',protocol: 'http', }, + {url: 
'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/BreakingTechFr/Proxy_Free/refs/heads/main/proxies/http.txt', protocol: 'http' }, + {url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',protocol: 'http', }, + {url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',protocol: 'http', }, + + // {url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',protocol: 'https', }, + // {url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',protocol: 'https', }, + // {url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt', protocol: 'https' }, + // {url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt',protocol: 'https', }, + // {url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt',protocol: 'https', }, + // {url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',protocol: 'https', }, + // {url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',protocol: 'https', }, + ] +}; + +// Shared instances (module-scoped, not global) +let logger: ReturnType; +let cache: CacheProvider; +let httpClient: HttpClient; +let concurrencyLimit: ReturnType; + +// Initialize shared resources +function initializeSharedResources() { + if (!logger) { + 
logger = getLogger('proxy-tasks'); + cache = createCache('hybrid'); + httpClient = new HttpClient({ timeout: 10000 }, logger); + concurrencyLimit = pLimit(PROXY_CONFIG.CONCURRENCY_LIMIT); + logger.info('Proxy tasks initialized'); + } +} + +// Individual task functions +export async function queueProxyFetch(): Promise { + initializeSharedResources(); + + const { queueManager } = await import('../services/queue.service'); + const job = await queueManager.addJob({ + type: 'proxy-fetch', + service: 'proxy', + provider: 'proxy-service', + operation: 'fetch-and-check', + payload: {}, + priority: 5 + }); + + const jobId = job.id || 'unknown'; + logger.info('Proxy fetch job queued', { jobId }); + return jobId; +} + +export async function queueProxyCheck(proxies: ProxyInfo[]): Promise { + initializeSharedResources(); + + const { queueManager } = await import('../services/queue.service'); + const job = await queueManager.addJob({ + type: 'proxy-check', + service: 'proxy', + provider: 'proxy-service', + operation: 'check-specific', + payload: { proxies }, + priority: 3 + }); + + const jobId = job.id || 'unknown'; + logger.info('Proxy check job queued', { jobId, count: proxies.length }); + return jobId; +} + +export async function fetchProxiesFromSources(): Promise { + initializeSharedResources(); + + const sources = PROXY_CONFIG.PROXY_SOURCES.map(source => + concurrencyLimit(() => fetchProxiesFromSource(source)) + ); + const result = await Promise.all(sources); + let allProxies: ProxyInfo[] = result.flat(); + allProxies = removeDuplicateProxies(allProxies); + // await checkProxies(allProxies); + return allProxies; +} + +export async function fetchProxiesFromSource(source: { url: string; protocol: string }): Promise { + initializeSharedResources(); + + const allProxies: ProxyInfo[] = []; + + try { + logger.info(`Fetching proxies from ${source.url}`); + + const response = await httpClient.get(source.url, { + timeout: 10000 + }); + + if (response.status !== 200) { + 
logger.warn(`Failed to fetch from ${source.url}: ${response.status}`); + return []; + } + + const text = response.data; + const lines = text.split('\n').filter((line: string) => line.trim()); + + for (const line of lines) { + let trimmed = line.trim(); + trimmed = cleanProxyUrl(trimmed); + if (!trimmed || trimmed.startsWith('#')) continue; + + // Parse formats like "host:port" or "host:port:user:pass" + const parts = trimmed.split(':'); + if (parts.length >= 2) { + const proxy: ProxyInfo = { + protocol: source.protocol as 'http' | 'https' | 'socks4' | 'socks5', + host: parts[0], + port: parseInt(parts[1]) + }; + + if (!isNaN(proxy.port) && proxy.host) { + allProxies.push(proxy); + } + } + } + + logger.info(`Parsed ${allProxies.length} proxies from ${source.url}`); + + } catch (error) { + logger.error(`Error fetching proxies from ${source.url}`, error); + return []; + } + + return allProxies; +} + +/** + * Check if a proxy is working + */ +export async function checkProxy(proxy: ProxyInfo): Promise { + initializeSharedResources(); + + let success = false; + logger.debug(`Checking Proxy:`, { + protocol: proxy.protocol, + host: proxy.host, + port: proxy.port, + }); + + try { + // Test the proxy + const response = await httpClient.get(PROXY_CONFIG.CHECK_URL, { + proxy, + timeout: PROXY_CONFIG.CHECK_TIMEOUT + }); + + const isWorking = response.status >= 200 && response.status < 300; + + const result: ProxyInfo = { + ...proxy, + isWorking, + checkedAt: new Date(), + responseTime: response.responseTime, + }; + + if (isWorking && !JSON.stringify(response.data).includes(PROXY_CONFIG.CHECK_IP)) { + success = true; + await cache.set(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`, result, PROXY_CONFIG.CACHE_TTL); + } else { + await cache.del(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`); + } + + logger.debug('Proxy check completed', { + host: proxy.host, + port: proxy.port, + isWorking, + }); + + return result; + + } 
catch (error) { + const errorMessage = error instanceof Error ? error.message : String(error); + + const result: ProxyInfo = { + ...proxy, + isWorking: false, + error: errorMessage, + checkedAt: new Date() + }; + + // If the proxy check failed, remove it from cache - success is here cause i think abort signal fails sometimes + if (!success) { + await cache.del(`${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`); + } + + logger.debug('Proxy check failed', { + host: proxy.host, + port: proxy.port, + error: errorMessage + }); + + return result; + } +} + +// Utility functions +function cleanProxyUrl(url: string): string { + return url + .replace(/^https?:\/\//, '') + .replace(/^0+/, '') + .replace(/:0+(\d)/g, ':$1'); +} + +function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] { + const seen = new Set(); + const unique: ProxyInfo[] = []; + + for (const proxy of proxies) { + const key = `${proxy.protocol}://${proxy.host}:${proxy.port}`; + if (!seen.has(key)) { + seen.add(key); + unique.push(proxy); + } + } + + return unique; +} + +// Optional: Export a convenience object that groups related tasks +export const proxyTasks = { + queueProxyFetch, + queueProxyCheck, + fetchProxiesFromSources, + fetchProxiesFromSource, + checkProxy, +}; + +// Export singleton instance for backward compatibility (optional) +// Remove this if you want to fully move to the task-based approach export const proxyService = proxyTasks; \ No newline at end of file diff --git a/apps/data-service/src/providers/quotemedia.provider.ts b/apps/data-service/src/providers/quotemedia.provider.ts index 4a2d136..2203c23 100644 --- a/apps/data-service/src/providers/quotemedia.provider.ts +++ b/apps/data-service/src/providers/quotemedia.provider.ts @@ -1,175 +1,175 @@ -import { ProviderConfig } from '../services/provider-registry.service'; -import { getLogger } from '@stock-bot/logger'; - -const logger = getLogger('quotemedia-provider'); - -export const quotemediaProvider: 
ProviderConfig = { - name: 'quotemedia', - service: 'market-data', - operations: { 'live-data': async (payload: { symbol: string; fields?: string[] }) => { - logger.info('Fetching live data from QuoteMedia', { symbol: payload.symbol }); - - // Simulate QuoteMedia API call - const mockData = { - symbol: payload.symbol, - price: Math.random() * 1000 + 100, - volume: Math.floor(Math.random() * 1000000), - change: (Math.random() - 0.5) * 20, - changePercent: (Math.random() - 0.5) * 5, - timestamp: new Date().toISOString(), - source: 'quotemedia', - fields: payload.fields || ['price', 'volume', 'change'] - }; - - // Simulate network delay - await new Promise(resolve => setTimeout(resolve, 100 + Math.random() * 200)); - - return mockData; - }, - - 'historical-data': async (payload: { - symbol: string; - from: Date; - to: Date; - interval?: string; - fields?: string[]; }) => { - logger.info('Fetching historical data from QuoteMedia', { - symbol: payload.symbol, - from: payload.from, - to: payload.to, - interval: payload.interval || '1d' - }); - - // Generate mock historical data - const days = Math.ceil((payload.to.getTime() - payload.from.getTime()) / (1000 * 60 * 60 * 24)); - const data = []; - - for (let i = 0; i < Math.min(days, 100); i++) { - const date = new Date(payload.from.getTime() + i * 24 * 60 * 60 * 1000); - data.push({ - date: date.toISOString().split('T')[0], - open: Math.random() * 1000 + 100, - high: Math.random() * 1000 + 100, - low: Math.random() * 1000 + 100, - close: Math.random() * 1000 + 100, - volume: Math.floor(Math.random() * 1000000), - source: 'quotemedia' - }); - } - - // Simulate network delay - await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 300)); - - return { - symbol: payload.symbol, - interval: payload.interval || '1d', - data, - source: 'quotemedia', - totalRecords: data.length - }; - }, - 'batch-quotes': async (payload: { symbols: string[]; fields?: string[] }) => { - logger.info('Fetching batch quotes from 
QuoteMedia', { - symbols: payload.symbols, - count: payload.symbols.length - }); - - const quotes = payload.symbols.map(symbol => ({ - symbol, - price: Math.random() * 1000 + 100, - volume: Math.floor(Math.random() * 1000000), - change: (Math.random() - 0.5) * 20, - timestamp: new Date().toISOString(), - source: 'quotemedia' - })); - - // Simulate network delay - await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200)); - - return { - quotes, - source: 'quotemedia', - timestamp: new Date().toISOString(), - totalSymbols: payload.symbols.length - }; - }, 'company-profile': async (payload: { symbol: string }) => { - logger.info('Fetching company profile from QuoteMedia', { symbol: payload.symbol }); - - // Simulate company profile data - const profile = { - symbol: payload.symbol, - companyName: `${payload.symbol} Corporation`, - sector: 'Technology', - industry: 'Software', - description: `${payload.symbol} is a leading technology company.`, - marketCap: Math.floor(Math.random() * 1000000000000), - employees: Math.floor(Math.random() * 100000), - website: `https://www.${payload.symbol.toLowerCase()}.com`, - source: 'quotemedia' - }; - - await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 100)); - - return profile; - }, 'options-chain': async (payload: { symbol: string; expiration?: string }) => { - logger.info('Fetching options chain from QuoteMedia', { - symbol: payload.symbol, - expiration: payload.expiration - }); - - // Generate mock options data - const strikes = Array.from({ length: 20 }, (_, i) => 100 + i * 5); - const calls = strikes.map(strike => ({ - strike, - bid: Math.random() * 10, - ask: Math.random() * 10 + 0.5, - volume: Math.floor(Math.random() * 1000), - openInterest: Math.floor(Math.random() * 5000) - })); - - const puts = strikes.map(strike => ({ - strike, - bid: Math.random() * 10, - ask: Math.random() * 10 + 0.5, - volume: Math.floor(Math.random() * 1000), - openInterest: Math.floor(Math.random() * 5000) - 
})); - - await new Promise(resolve => setTimeout(resolve, 400 + Math.random() * 300)); - return { - symbol: payload.symbol, - expiration: payload.expiration || new Date(Date.now() + 30 * 24 * 60 * 60 * 1000).toISOString().split('T')[0], - calls, - puts, - source: 'quotemedia' - }; - } - }, - - scheduledJobs: [ - // { - // type: 'quotemedia-premium-refresh', - // operation: 'batch-quotes', - // payload: { symbols: ['AAPL', 'GOOGL', 'MSFT'] }, - // cronPattern: '*/2 * * * *', // Every 2 minutes - // priority: 7, - // description: 'Refresh premium quotes with detailed market data' - // }, - // { - // type: 'quotemedia-options-update', - // operation: 'options-chain', - // payload: { symbol: 'SPY' }, - // cronPattern: '*/10 * * * *', // Every 10 minutes - // priority: 5, - // description: 'Update options chain data for SPY ETF' - // }, - // { - // type: 'quotemedia-profiles', - // operation: 'company-profile', - // payload: { symbol: 'AAPL' }, - // cronPattern: '0 9 * * 1-5', // Weekdays at 9 AM - // priority: 3, - // description: 'Update company profile data' - // } - ] -}; +import { ProviderConfig } from '../services/provider-registry.service'; +import { getLogger } from '@stock-bot/logger'; + +const logger = getLogger('quotemedia-provider'); + +export const quotemediaProvider: ProviderConfig = { + name: 'quotemedia', + service: 'market-data', + operations: { 'live-data': async (payload: { symbol: string; fields?: string[] }) => { + logger.info('Fetching live data from QuoteMedia', { symbol: payload.symbol }); + + // Simulate QuoteMedia API call + const mockData = { + symbol: payload.symbol, + price: Math.random() * 1000 + 100, + volume: Math.floor(Math.random() * 1000000), + change: (Math.random() - 0.5) * 20, + changePercent: (Math.random() - 0.5) * 5, + timestamp: new Date().toISOString(), + source: 'quotemedia', + fields: payload.fields || ['price', 'volume', 'change'] + }; + + // Simulate network delay + await new Promise(resolve => setTimeout(resolve, 100 + 
Math.random() * 200)); + + return mockData; + }, + + 'historical-data': async (payload: { + symbol: string; + from: Date; + to: Date; + interval?: string; + fields?: string[]; }) => { + logger.info('Fetching historical data from QuoteMedia', { + symbol: payload.symbol, + from: payload.from, + to: payload.to, + interval: payload.interval || '1d' + }); + + // Generate mock historical data + const days = Math.ceil((payload.to.getTime() - payload.from.getTime()) / (1000 * 60 * 60 * 24)); + const data = []; + + for (let i = 0; i < Math.min(days, 100); i++) { + const date = new Date(payload.from.getTime() + i * 24 * 60 * 60 * 1000); + data.push({ + date: date.toISOString().split('T')[0], + open: Math.random() * 1000 + 100, + high: Math.random() * 1000 + 100, + low: Math.random() * 1000 + 100, + close: Math.random() * 1000 + 100, + volume: Math.floor(Math.random() * 1000000), + source: 'quotemedia' + }); + } + + // Simulate network delay + await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 300)); + + return { + symbol: payload.symbol, + interval: payload.interval || '1d', + data, + source: 'quotemedia', + totalRecords: data.length + }; + }, + 'batch-quotes': async (payload: { symbols: string[]; fields?: string[] }) => { + logger.info('Fetching batch quotes from QuoteMedia', { + symbols: payload.symbols, + count: payload.symbols.length + }); + + const quotes = payload.symbols.map(symbol => ({ + symbol, + price: Math.random() * 1000 + 100, + volume: Math.floor(Math.random() * 1000000), + change: (Math.random() - 0.5) * 20, + timestamp: new Date().toISOString(), + source: 'quotemedia' + })); + + // Simulate network delay + await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200)); + + return { + quotes, + source: 'quotemedia', + timestamp: new Date().toISOString(), + totalSymbols: payload.symbols.length + }; + }, 'company-profile': async (payload: { symbol: string }) => { + logger.info('Fetching company profile from QuoteMedia', { 
symbol: payload.symbol }); + + // Simulate company profile data + const profile = { + symbol: payload.symbol, + companyName: `${payload.symbol} Corporation`, + sector: 'Technology', + industry: 'Software', + description: `${payload.symbol} is a leading technology company.`, + marketCap: Math.floor(Math.random() * 1000000000000), + employees: Math.floor(Math.random() * 100000), + website: `https://www.${payload.symbol.toLowerCase()}.com`, + source: 'quotemedia' + }; + + await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 100)); + + return profile; + }, 'options-chain': async (payload: { symbol: string; expiration?: string }) => { + logger.info('Fetching options chain from QuoteMedia', { + symbol: payload.symbol, + expiration: payload.expiration + }); + + // Generate mock options data + const strikes = Array.from({ length: 20 }, (_, i) => 100 + i * 5); + const calls = strikes.map(strike => ({ + strike, + bid: Math.random() * 10, + ask: Math.random() * 10 + 0.5, + volume: Math.floor(Math.random() * 1000), + openInterest: Math.floor(Math.random() * 5000) + })); + + const puts = strikes.map(strike => ({ + strike, + bid: Math.random() * 10, + ask: Math.random() * 10 + 0.5, + volume: Math.floor(Math.random() * 1000), + openInterest: Math.floor(Math.random() * 5000) + })); + + await new Promise(resolve => setTimeout(resolve, 400 + Math.random() * 300)); + return { + symbol: payload.symbol, + expiration: payload.expiration || new Date(Date.now() + 30 * 24 * 60 * 60 * 1000).toISOString().split('T')[0], + calls, + puts, + source: 'quotemedia' + }; + } + }, + + scheduledJobs: [ + // { + // type: 'quotemedia-premium-refresh', + // operation: 'batch-quotes', + // payload: { symbols: ['AAPL', 'GOOGL', 'MSFT'] }, + // cronPattern: '*/2 * * * *', // Every 2 minutes + // priority: 7, + // description: 'Refresh premium quotes with detailed market data' + // }, + // { + // type: 'quotemedia-options-update', + // operation: 'options-chain', + // payload: { symbol: 
'SPY' }, + // cronPattern: '*/10 * * * *', // Every 10 minutes + // priority: 5, + // description: 'Update options chain data for SPY ETF' + // }, + // { + // type: 'quotemedia-profiles', + // operation: 'company-profile', + // payload: { symbol: 'AAPL' }, + // cronPattern: '0 9 * * 1-5', // Weekdays at 9 AM + // priority: 3, + // description: 'Update company profile data' + // } + ] +}; diff --git a/apps/data-service/src/providers/yahoo.provider.ts b/apps/data-service/src/providers/yahoo.provider.ts index ae849d5..2eb26b1 100644 --- a/apps/data-service/src/providers/yahoo.provider.ts +++ b/apps/data-service/src/providers/yahoo.provider.ts @@ -1,249 +1,249 @@ -import { ProviderConfig } from '../services/provider-registry.service'; -import { getLogger } from '@stock-bot/logger'; - -const logger = getLogger('yahoo-provider'); - -export const yahooProvider: ProviderConfig = { - name: 'yahoo-finance', - service: 'market-data', - operations: { - 'live-data': async (payload: { symbol: string; modules?: string[] }) => { - - - logger.info('Fetching live data from Yahoo Finance', { symbol: payload.symbol }); - - // Simulate Yahoo Finance API call - const mockData = { - symbol: payload.symbol, - regularMarketPrice: Math.random() * 1000 + 100, - regularMarketVolume: Math.floor(Math.random() * 1000000), - regularMarketChange: (Math.random() - 0.5) * 20, - regularMarketChangePercent: (Math.random() - 0.5) * 5, - preMarketPrice: Math.random() * 1000 + 100, - postMarketPrice: Math.random() * 1000 + 100, - marketCap: Math.floor(Math.random() * 1000000000000), - peRatio: Math.random() * 50 + 5, - dividendYield: Math.random() * 0.1, - fiftyTwoWeekHigh: Math.random() * 1200 + 100, - fiftyTwoWeekLow: Math.random() * 800 + 50, - timestamp: Date.now() / 1000, - source: 'yahoo-finance', - modules: payload.modules || ['price', 'summaryDetail'] - }; - - // Simulate network delay - await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 250)); - - return mockData; - }, - - 
'historical-data': async (payload: { - symbol: string; - period1: number; - period2: number; - interval?: string; - events?: string; }) => { - const { getLogger } = await import('@stock-bot/logger'); - const logger = getLogger('yahoo-provider'); - - logger.info('Fetching historical data from Yahoo Finance', { - symbol: payload.symbol, - period1: payload.period1, - period2: payload.period2, - interval: payload.interval || '1d' - }); - - // Generate mock historical data - const days = Math.ceil((payload.period2 - payload.period1) / (24 * 60 * 60)); - const data = []; - - for (let i = 0; i < Math.min(days, 100); i++) { - const timestamp = payload.period1 + i * 24 * 60 * 60; - data.push({ - timestamp, - date: new Date(timestamp * 1000).toISOString().split('T')[0], - open: Math.random() * 1000 + 100, - high: Math.random() * 1000 + 100, - low: Math.random() * 1000 + 100, - close: Math.random() * 1000 + 100, - adjClose: Math.random() * 1000 + 100, - volume: Math.floor(Math.random() * 1000000), - source: 'yahoo-finance' - }); - } - - // Simulate network delay - await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 350)); - - return { - symbol: payload.symbol, - interval: payload.interval || '1d', - timestamps: data.map(d => d.timestamp), - indicators: { - quote: [{ - open: data.map(d => d.open), - high: data.map(d => d.high), - low: data.map(d => d.low), - close: data.map(d => d.close), - volume: data.map(d => d.volume) - }], - adjclose: [{ - adjclose: data.map(d => d.adjClose) - }] - }, - source: 'yahoo-finance', - totalRecords: data.length - }; - }, - 'search': async (payload: { query: string; quotesCount?: number; newsCount?: number }) => { - const { getLogger } = await import('@stock-bot/logger'); - const logger = getLogger('yahoo-provider'); - - logger.info('Searching Yahoo Finance', { query: payload.query }); - - // Generate mock search results - const quotes = Array.from({ length: payload.quotesCount || 5 }, (_, i) => ({ - symbol: 
`${payload.query.toUpperCase()}${i}`, - shortname: `${payload.query} Company ${i}`, - longname: `${payload.query} Corporation ${i}`, - exchDisp: 'NASDAQ', - typeDisp: 'Equity', - source: 'yahoo-finance' - })); - - const news = Array.from({ length: payload.newsCount || 3 }, (_, i) => ({ - uuid: `news-${i}-${Date.now()}`, - title: `${payload.query} News Article ${i}`, - publisher: 'Financial News', - providerPublishTime: Date.now() - i * 3600000, - type: 'STORY', - source: 'yahoo-finance' - })); - - await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 200)); - - return { - quotes, - news, - totalQuotes: quotes.length, - totalNews: news.length, - source: 'yahoo-finance' - }; - }, 'financials': async (payload: { symbol: string; type?: 'income' | 'balance' | 'cash' }) => { - const { getLogger } = await import('@stock-bot/logger'); - const logger = getLogger('yahoo-provider'); - - logger.info('Fetching financials from Yahoo Finance', { - symbol: payload.symbol, - type: payload.type || 'income' - }); - - // Generate mock financial data - const financials = { - symbol: payload.symbol, - type: payload.type || 'income', - currency: 'USD', - annual: Array.from({ length: 4 }, (_, i) => ({ - fiscalYear: 2024 - i, - revenue: Math.floor(Math.random() * 100000000000), - netIncome: Math.floor(Math.random() * 10000000000), - totalAssets: Math.floor(Math.random() * 500000000000), - totalDebt: Math.floor(Math.random() * 50000000000) - })), - quarterly: Array.from({ length: 4 }, (_, i) => ({ - fiscalQuarter: `Q${4-i} 2024`, - revenue: Math.floor(Math.random() * 25000000000), - netIncome: Math.floor(Math.random() * 2500000000) - })), - source: 'yahoo-finance' - }; - - await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200)); - - return financials; - }, 'earnings': async (payload: { symbol: string; period?: 'annual' | 'quarterly' }) => { - const { getLogger } = await import('@stock-bot/logger'); - const logger = getLogger('yahoo-provider'); - - 
logger.info('Fetching earnings from Yahoo Finance', { - symbol: payload.symbol, - period: payload.period || 'quarterly' - }); - - // Generate mock earnings data - const earnings = { - symbol: payload.symbol, - period: payload.period || 'quarterly', - earnings: Array.from({ length: 8 }, (_, i) => ({ - quarter: `Q${(i % 4) + 1} ${2024 - Math.floor(i/4)}`, - epsEstimate: Math.random() * 5, - epsActual: Math.random() * 5, - revenueEstimate: Math.floor(Math.random() * 50000000000), - revenueActual: Math.floor(Math.random() * 50000000000), - surprise: (Math.random() - 0.5) * 2 - })), - source: 'yahoo-finance' - }; - - await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 150)); - - return earnings; - }, 'recommendations': async (payload: { symbol: string }) => { - const { getLogger } = await import('@stock-bot/logger'); - const logger = getLogger('yahoo-provider'); - - logger.info('Fetching recommendations from Yahoo Finance', { symbol: payload.symbol }); - - // Generate mock recommendations - const recommendations = { - symbol: payload.symbol, - current: { - strongBuy: Math.floor(Math.random() * 10), - buy: Math.floor(Math.random() * 15), - hold: Math.floor(Math.random() * 20), - sell: Math.floor(Math.random() * 5), - strongSell: Math.floor(Math.random() * 3) - }, - trend: Array.from({ length: 4 }, (_, i) => ({ - period: `${i}m`, - strongBuy: Math.floor(Math.random() * 10), - buy: Math.floor(Math.random() * 15), - hold: Math.floor(Math.random() * 20), - sell: Math.floor(Math.random() * 5), - strongSell: Math.floor(Math.random() * 3) - })), - source: 'yahoo-finance' - }; - - await new Promise(resolve => setTimeout(resolve, 180 + Math.random() * 120)); - return recommendations; - } - }, - - scheduledJobs: [ - // { - // type: 'yahoo-market-refresh', - // operation: 'live-data', - // payload: { symbol: 'AAPL' }, - // cronPattern: '*/1 * * * *', // Every minute - // priority: 8, - // description: 'Refresh Apple stock price from Yahoo Finance' - // }, - // { 
- // type: 'yahoo-sp500-update', - // operation: 'live-data', - // payload: { symbol: 'SPY' }, - // cronPattern: '*/2 * * * *', // Every 2 minutes - // priority: 9, - // description: 'Update S&P 500 ETF price' - // }, - // { - // type: 'yahoo-earnings-check', - // operation: 'earnings', - // payload: { symbol: 'AAPL' }, - // cronPattern: '0 16 * * 1-5', // Weekdays at 4 PM (market close) - // priority: 6, - // description: 'Check earnings data for Apple' - // } - ] -}; +import { ProviderConfig } from '../services/provider-registry.service'; +import { getLogger } from '@stock-bot/logger'; + +const logger = getLogger('yahoo-provider'); + +export const yahooProvider: ProviderConfig = { + name: 'yahoo-finance', + service: 'market-data', + operations: { + 'live-data': async (payload: { symbol: string; modules?: string[] }) => { + + + logger.info('Fetching live data from Yahoo Finance', { symbol: payload.symbol }); + + // Simulate Yahoo Finance API call + const mockData = { + symbol: payload.symbol, + regularMarketPrice: Math.random() * 1000 + 100, + regularMarketVolume: Math.floor(Math.random() * 1000000), + regularMarketChange: (Math.random() - 0.5) * 20, + regularMarketChangePercent: (Math.random() - 0.5) * 5, + preMarketPrice: Math.random() * 1000 + 100, + postMarketPrice: Math.random() * 1000 + 100, + marketCap: Math.floor(Math.random() * 1000000000000), + peRatio: Math.random() * 50 + 5, + dividendYield: Math.random() * 0.1, + fiftyTwoWeekHigh: Math.random() * 1200 + 100, + fiftyTwoWeekLow: Math.random() * 800 + 50, + timestamp: Date.now() / 1000, + source: 'yahoo-finance', + modules: payload.modules || ['price', 'summaryDetail'] + }; + + // Simulate network delay + await new Promise(resolve => setTimeout(resolve, 150 + Math.random() * 250)); + + return mockData; + }, + + 'historical-data': async (payload: { + symbol: string; + period1: number; + period2: number; + interval?: string; + events?: string; }) => { + const { getLogger } = await 
import('@stock-bot/logger'); + const logger = getLogger('yahoo-provider'); + + logger.info('Fetching historical data from Yahoo Finance', { + symbol: payload.symbol, + period1: payload.period1, + period2: payload.period2, + interval: payload.interval || '1d' + }); + + // Generate mock historical data + const days = Math.ceil((payload.period2 - payload.period1) / (24 * 60 * 60)); + const data = []; + + for (let i = 0; i < Math.min(days, 100); i++) { + const timestamp = payload.period1 + i * 24 * 60 * 60; + data.push({ + timestamp, + date: new Date(timestamp * 1000).toISOString().split('T')[0], + open: Math.random() * 1000 + 100, + high: Math.random() * 1000 + 100, + low: Math.random() * 1000 + 100, + close: Math.random() * 1000 + 100, + adjClose: Math.random() * 1000 + 100, + volume: Math.floor(Math.random() * 1000000), + source: 'yahoo-finance' + }); + } + + // Simulate network delay + await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 350)); + + return { + symbol: payload.symbol, + interval: payload.interval || '1d', + timestamps: data.map(d => d.timestamp), + indicators: { + quote: [{ + open: data.map(d => d.open), + high: data.map(d => d.high), + low: data.map(d => d.low), + close: data.map(d => d.close), + volume: data.map(d => d.volume) + }], + adjclose: [{ + adjclose: data.map(d => d.adjClose) + }] + }, + source: 'yahoo-finance', + totalRecords: data.length + }; + }, + 'search': async (payload: { query: string; quotesCount?: number; newsCount?: number }) => { + const { getLogger } = await import('@stock-bot/logger'); + const logger = getLogger('yahoo-provider'); + + logger.info('Searching Yahoo Finance', { query: payload.query }); + + // Generate mock search results + const quotes = Array.from({ length: payload.quotesCount || 5 }, (_, i) => ({ + symbol: `${payload.query.toUpperCase()}${i}`, + shortname: `${payload.query} Company ${i}`, + longname: `${payload.query} Corporation ${i}`, + exchDisp: 'NASDAQ', + typeDisp: 'Equity', + source: 
'yahoo-finance' + })); + + const news = Array.from({ length: payload.newsCount || 3 }, (_, i) => ({ + uuid: `news-${i}-${Date.now()}`, + title: `${payload.query} News Article ${i}`, + publisher: 'Financial News', + providerPublishTime: Date.now() - i * 3600000, + type: 'STORY', + source: 'yahoo-finance' + })); + + await new Promise(resolve => setTimeout(resolve, 200 + Math.random() * 200)); + + return { + quotes, + news, + totalQuotes: quotes.length, + totalNews: news.length, + source: 'yahoo-finance' + }; + }, 'financials': async (payload: { symbol: string; type?: 'income' | 'balance' | 'cash' }) => { + const { getLogger } = await import('@stock-bot/logger'); + const logger = getLogger('yahoo-provider'); + + logger.info('Fetching financials from Yahoo Finance', { + symbol: payload.symbol, + type: payload.type || 'income' + }); + + // Generate mock financial data + const financials = { + symbol: payload.symbol, + type: payload.type || 'income', + currency: 'USD', + annual: Array.from({ length: 4 }, (_, i) => ({ + fiscalYear: 2024 - i, + revenue: Math.floor(Math.random() * 100000000000), + netIncome: Math.floor(Math.random() * 10000000000), + totalAssets: Math.floor(Math.random() * 500000000000), + totalDebt: Math.floor(Math.random() * 50000000000) + })), + quarterly: Array.from({ length: 4 }, (_, i) => ({ + fiscalQuarter: `Q${4-i} 2024`, + revenue: Math.floor(Math.random() * 25000000000), + netIncome: Math.floor(Math.random() * 2500000000) + })), + source: 'yahoo-finance' + }; + + await new Promise(resolve => setTimeout(resolve, 300 + Math.random() * 200)); + + return financials; + }, 'earnings': async (payload: { symbol: string; period?: 'annual' | 'quarterly' }) => { + const { getLogger } = await import('@stock-bot/logger'); + const logger = getLogger('yahoo-provider'); + + logger.info('Fetching earnings from Yahoo Finance', { + symbol: payload.symbol, + period: payload.period || 'quarterly' + }); + + // Generate mock earnings data + const earnings = { + symbol: 
payload.symbol, + period: payload.period || 'quarterly', + earnings: Array.from({ length: 8 }, (_, i) => ({ + quarter: `Q${(i % 4) + 1} ${2024 - Math.floor(i/4)}`, + epsEstimate: Math.random() * 5, + epsActual: Math.random() * 5, + revenueEstimate: Math.floor(Math.random() * 50000000000), + revenueActual: Math.floor(Math.random() * 50000000000), + surprise: (Math.random() - 0.5) * 2 + })), + source: 'yahoo-finance' + }; + + await new Promise(resolve => setTimeout(resolve, 250 + Math.random() * 150)); + + return earnings; + }, 'recommendations': async (payload: { symbol: string }) => { + const { getLogger } = await import('@stock-bot/logger'); + const logger = getLogger('yahoo-provider'); + + logger.info('Fetching recommendations from Yahoo Finance', { symbol: payload.symbol }); + + // Generate mock recommendations + const recommendations = { + symbol: payload.symbol, + current: { + strongBuy: Math.floor(Math.random() * 10), + buy: Math.floor(Math.random() * 15), + hold: Math.floor(Math.random() * 20), + sell: Math.floor(Math.random() * 5), + strongSell: Math.floor(Math.random() * 3) + }, + trend: Array.from({ length: 4 }, (_, i) => ({ + period: `${i}m`, + strongBuy: Math.floor(Math.random() * 10), + buy: Math.floor(Math.random() * 15), + hold: Math.floor(Math.random() * 20), + sell: Math.floor(Math.random() * 5), + strongSell: Math.floor(Math.random() * 3) + })), + source: 'yahoo-finance' + }; + + await new Promise(resolve => setTimeout(resolve, 180 + Math.random() * 120)); + return recommendations; + } + }, + + scheduledJobs: [ + // { + // type: 'yahoo-market-refresh', + // operation: 'live-data', + // payload: { symbol: 'AAPL' }, + // cronPattern: '*/1 * * * *', // Every minute + // priority: 8, + // description: 'Refresh Apple stock price from Yahoo Finance' + // }, + // { + // type: 'yahoo-sp500-update', + // operation: 'live-data', + // payload: { symbol: 'SPY' }, + // cronPattern: '*/2 * * * *', // Every 2 minutes + // priority: 9, + // description: 'Update 
S&P 500 ETF price' + // }, + // { + // type: 'yahoo-earnings-check', + // operation: 'earnings', + // payload: { symbol: 'AAPL' }, + // cronPattern: '0 16 * * 1-5', // Weekdays at 4 PM (market close) + // priority: 6, + // description: 'Check earnings data for Apple' + // } + ] +}; diff --git a/apps/data-service/src/proxy-demo.ts b/apps/data-service/src/proxy-demo.ts index 7b02acf..eeb7db9 100644 --- a/apps/data-service/src/proxy-demo.ts +++ b/apps/data-service/src/proxy-demo.ts @@ -1,24 +1,24 @@ -import { proxyService } from './providers/proxy.tasks'; -import { getLogger } from '@stock-bot/logger'; - -// Initialize logger for the demo -const logger = getLogger('proxy-demo'); -console.log('πŸ”§ Starting proxy demo...'); -/** - * Example: Custom proxy source with enhanced logging - */ -async function demonstrateCustomProxySource() { - console.log('πŸ”§ Demonstrating!'); - logger.info('πŸ”§ Demonstrating custom proxy source...'); - - try { - console.log('πŸ”§ Demonstrating 1'); - await proxyService.fetchProxiesFromSources(); - console.log('πŸ”§ Demonstrating custom proxy source is DONE!'); - } catch (error) { - logger.error('❌ Custom source scraping failed',{ - error: error - }); - } -} -demonstrateCustomProxySource() +import { proxyService } from './providers/proxy.tasks'; +import { getLogger } from '@stock-bot/logger'; + +// Initialize logger for the demo +const logger = getLogger('proxy-demo'); +console.log('πŸ”§ Starting proxy demo...'); +/** + * Example: Custom proxy source with enhanced logging + */ +async function demonstrateCustomProxySource() { + console.log('πŸ”§ Demonstrating!'); + logger.info('πŸ”§ Demonstrating custom proxy source...'); + + try { + console.log('πŸ”§ Demonstrating 1'); + await proxyService.fetchProxiesFromSources(); + console.log('πŸ”§ Demonstrating custom proxy source is DONE!'); + } catch (error) { + logger.error('❌ Custom source scraping failed',{ + error: error + }); + } +} +demonstrateCustomProxySource() diff --git 
a/apps/data-service/src/services/provider-registry.service.ts b/apps/data-service/src/services/provider-registry.service.ts index e2d9be3..ac7ab0f 100644 --- a/apps/data-service/src/services/provider-registry.service.ts +++ b/apps/data-service/src/services/provider-registry.service.ts @@ -1,115 +1,115 @@ -import { getLogger } from '@stock-bot/logger'; - -export interface JobHandler { - (payload: any): Promise; -} - -export interface ScheduledJob { - type: string; - operation: string; - payload: any; - cronPattern: string; - priority?: number; - description?: string; - immediately?: boolean; -} - -export interface ProviderConfig { - name: string; - service: string; - operations: Record; - scheduledJobs?: ScheduledJob[]; -} - -export class ProviderRegistry { - private logger = getLogger('provider-registry'); - private providers = new Map(); - - /** - * Register a provider with its operations - */ registerProvider(config: ProviderConfig): void { - const key = `${config.service}:${config.name}`; - this.providers.set(key, config); - this.logger.info(`Registered provider: ${key}`, { - operations: Object.keys(config.operations), - scheduledJobs: config.scheduledJobs?.length || 0 - }); - } - - /** - * Get a job handler for a specific provider and operation - */ - getHandler(service: string, provider: string, operation: string): JobHandler | null { - const key = `${service}:${provider}`; - const providerConfig = this.providers.get(key); - - if (!providerConfig) { - this.logger.warn(`Provider not found: ${key}`); - return null; - } - - const handler = providerConfig.operations[operation]; - if (!handler) { - this.logger.warn(`Operation not found: ${operation} in provider ${key}`); - return null; - } - - return handler; - } - - /** - * Get all registered providers - */ - getAllScheduledJobs(): Array<{ - service: string; - provider: string; - job: ScheduledJob; - }> { - const allJobs: Array<{ service: string; provider: string; job: ScheduledJob }> = []; - - for (const [key, 
config] of this.providers) { - if (config.scheduledJobs) { - for (const job of config.scheduledJobs) { - allJobs.push({ - service: config.service, - provider: config.name, - job - }); - } - } - } - - return allJobs; - } - - getProviders(): Array<{ key: string; config: ProviderConfig }> { - return Array.from(this.providers.entries()).map(([key, config]) => ({ - key, - config - })); - } - - /** - * Check if a provider exists - */ - hasProvider(service: string, provider: string): boolean { - return this.providers.has(`${service}:${provider}`); - } - - /** - * Get providers by service type - */ - getProvidersByService(service: string): ProviderConfig[] { - return Array.from(this.providers.values()).filter(provider => provider.service === service); - } - - /** - * Clear all providers (useful for testing) - */ - clear(): void { - this.providers.clear(); - this.logger.info('All providers cleared'); - } -} - -export const providerRegistry = new ProviderRegistry(); +import { getLogger } from '@stock-bot/logger'; + +export interface JobHandler { + (payload: any): Promise; +} + +export interface ScheduledJob { + type: string; + operation: string; + payload: any; + cronPattern: string; + priority?: number; + description?: string; + immediately?: boolean; +} + +export interface ProviderConfig { + name: string; + service: string; + operations: Record; + scheduledJobs?: ScheduledJob[]; +} + +export class ProviderRegistry { + private logger = getLogger('provider-registry'); + private providers = new Map(); + + /** + * Register a provider with its operations + */ registerProvider(config: ProviderConfig): void { + const key = `${config.service}:${config.name}`; + this.providers.set(key, config); + this.logger.info(`Registered provider: ${key}`, { + operations: Object.keys(config.operations), + scheduledJobs: config.scheduledJobs?.length || 0 + }); + } + + /** + * Get a job handler for a specific provider and operation + */ + getHandler(service: string, provider: string, operation: 
string): JobHandler | null { + const key = `${service}:${provider}`; + const providerConfig = this.providers.get(key); + + if (!providerConfig) { + this.logger.warn(`Provider not found: ${key}`); + return null; + } + + const handler = providerConfig.operations[operation]; + if (!handler) { + this.logger.warn(`Operation not found: ${operation} in provider ${key}`); + return null; + } + + return handler; + } + + /** + * Get all registered providers + */ + getAllScheduledJobs(): Array<{ + service: string; + provider: string; + job: ScheduledJob; + }> { + const allJobs: Array<{ service: string; provider: string; job: ScheduledJob }> = []; + + for (const [key, config] of this.providers) { + if (config.scheduledJobs) { + for (const job of config.scheduledJobs) { + allJobs.push({ + service: config.service, + provider: config.name, + job + }); + } + } + } + + return allJobs; + } + + getProviders(): Array<{ key: string; config: ProviderConfig }> { + return Array.from(this.providers.entries()).map(([key, config]) => ({ + key, + config + })); + } + + /** + * Check if a provider exists + */ + hasProvider(service: string, provider: string): boolean { + return this.providers.has(`${service}:${provider}`); + } + + /** + * Get providers by service type + */ + getProvidersByService(service: string): ProviderConfig[] { + return Array.from(this.providers.values()).filter(provider => provider.service === service); + } + + /** + * Clear all providers (useful for testing) + */ + clear(): void { + this.providers.clear(); + this.logger.info('All providers cleared'); + } +} + +export const providerRegistry = new ProviderRegistry(); diff --git a/apps/data-service/src/services/queue.service.ts b/apps/data-service/src/services/queue.service.ts index f057103..f3d3c26 100644 --- a/apps/data-service/src/services/queue.service.ts +++ b/apps/data-service/src/services/queue.service.ts @@ -1,478 +1,478 @@ -import { Queue, Worker, QueueEvents } from 'bullmq'; -import { getLogger } from 
'@stock-bot/logger'; -import { providerRegistry } from './provider-registry.service'; - -export interface JobData { - type: string; - service: string; - provider: string; - operation: string; - payload: any; - priority?: number; - immediately?: boolean; -} - -export class QueueService { - private logger = getLogger('queue-service'); - private queue!: Queue; - private workers: Worker[] = []; - private queueEvents!: QueueEvents; - private isInitialized = false; - - constructor() { - // Don't initialize in constructor to allow for proper async initialization - } - - async initialize() { - if (this.isInitialized) { - this.logger.warn('Queue service already initialized'); - return; - } - - this.logger.info('Initializing queue service...'); - - // Register all providers first - await this.registerProviders(); - - const connection = { - host: process.env.DRAGONFLY_HOST || 'localhost', - port: parseInt(process.env.DRAGONFLY_PORT || '6379'), - // Add these Redis-specific options to fix the undeclared key issue - maxRetriesPerRequest: null, - retryDelayOnFailover: 100, - enableReadyCheck: false, - lazyConnect: true, - // Disable Redis Cluster mode if you're using standalone Redis/Dragonfly - enableOfflineQueue: false - }; - - // Worker configuration - const workerCount = parseInt(process.env.WORKER_COUNT || '5'); - const concurrencyPerWorker = parseInt(process.env.WORKER_CONCURRENCY || '20'); - - this.logger.info('Connecting to Redis/Dragonfly', connection); - - try { - this.queue = new Queue('{data-service-queue}', { - connection, - defaultJobOptions: { - removeOnComplete: 10, - removeOnFail: 5, - attempts: 3, - backoff: { - type: 'exponential', - delay: 1000, - } - } - }); - // Create multiple workers - for (let i = 0; i < workerCount; i++) { - const worker = new Worker( - '{data-service-queue}', - this.processJob.bind(this), - { - connection: { ...connection }, // Each worker gets its own connection - concurrency: concurrencyPerWorker, - maxStalledCount: 1, - 
stalledInterval: 30000, - } - ); - // Add worker-specific logging - worker.on('ready', () => { - this.logger.info(`Worker ${i + 1} ready`, { workerId: i + 1 }); - }); - - worker.on('error', (error) => { - this.logger.error(`Worker ${i + 1} error`, { workerId: i + 1, error }); - }); - - this.workers.push(worker); - } - this.queueEvents = new QueueEvents('{data-service-queue}', { connection }); // Test connection - - // Wait for all workers to be ready - await this.queue.waitUntilReady(); - await Promise.all(this.workers.map(worker => worker.waitUntilReady())); - await this.queueEvents.waitUntilReady(); - - this.setupEventListeners(); - this.isInitialized = true; - this.logger.info('Queue service initialized successfully'); - - await this.setupScheduledTasks(); - - } catch (error) { - this.logger.error('Failed to initialize queue service', { error }); - throw error; - } - } - - // Update getTotalConcurrency method - getTotalConcurrency() { - if (!this.isInitialized) { - return 0; - } - return this.workers.reduce((total, worker) => { - return total + (worker.opts.concurrency || 1); - }, 0); - } - - private async registerProviders() { - this.logger.info('Registering providers...'); - - try { - // Import and register all providers - const { proxyProvider } = await import('../providers/proxy.provider'); - const { quotemediaProvider } = await import('../providers/quotemedia.provider'); - const { yahooProvider } = await import('../providers/yahoo.provider'); - - providerRegistry.registerProvider(proxyProvider); - providerRegistry.registerProvider(quotemediaProvider); - providerRegistry.registerProvider(yahooProvider); - - this.logger.info('All providers registered successfully'); - } catch (error) { - this.logger.error('Failed to register providers', { error }); - throw error; - } - } - - private async processJob(job: any) { - const { service, provider, operation, payload }: JobData = job.data; - - this.logger.info('Processing job', { - id: job.id, - service, - provider, - 
operation, - payloadKeys: Object.keys(payload || {}) - }); - - try { - // Get handler from registry - const handler = providerRegistry.getHandler(service, provider, operation); - - if (!handler) { - throw new Error(`No handler found for ${service}:${provider}:${operation}`); - } - - // Execute the handler - const result = await handler(payload); - - this.logger.info('Job completed successfully', { - id: job.id, - service, - provider, - operation - }); - - return result; - - } catch (error) { - const errorMessage = error instanceof Error ? error.message : String(error); - this.logger.error('Job failed', { - id: job.id, - service, - provider, - operation, - error: errorMessage - }); - throw error; - } - } - - async addBulk(jobs: any[]) : Promise { - return await this.queue.addBulk(jobs) - } - private setupEventListeners() { - this.queueEvents.on('completed', (job) => { - this.logger.info('Job completed', { id: job.jobId }); - }); - - this.queueEvents.on('failed', (job) => { - this.logger.error('Job failed', { id: job.jobId, error: job.failedReason }); - }); - - // Note: Worker-specific events are already set up during worker creation - // No need for additional progress events since we handle them per-worker - } - private async setupScheduledTasks() { - try { - this.logger.info('Setting up scheduled tasks from providers...'); - - // Get all scheduled jobs from all providers - const allScheduledJobs = providerRegistry.getAllScheduledJobs(); - - if (allScheduledJobs.length === 0) { - this.logger.warn('No scheduled jobs found in providers'); - return; - } - - // Get existing repeatable jobs for comparison - const existingJobs = await this.queue.getRepeatableJobs(); - this.logger.info(`Found ${existingJobs.length} existing repeatable jobs`); - - let successCount = 0; - let failureCount = 0; - let updatedCount = 0; - let newCount = 0; - - // Process each scheduled job - for (const { service, provider, job } of allScheduledJobs) { - try { - const jobKey = 
`${service}-${provider}-${job.operation}`; - - // Check if this job already exists - const existingJob = existingJobs.find(existing => - existing.key?.includes(jobKey) || existing.name === job.type - ); - - if (existingJob) { - // Check if the job needs updating (different cron pattern or config) - const needsUpdate = existingJob.pattern !== job.cronPattern; - - if (needsUpdate) { - this.logger.info('Job configuration changed, updating', { - jobKey, - oldPattern: existingJob.pattern, - newPattern: job.cronPattern - }); - updatedCount++; - } else { - this.logger.debug('Job unchanged, skipping', { jobKey }); - successCount++; - continue; - } - } else { - newCount++; - } - - // Add delay between job registrations - await new Promise(resolve => setTimeout(resolve, 100)); - - await this.addRecurringJob({ - type: job.type, - service: service, - provider: provider, - operation: job.operation, - payload: job.payload, - priority: job.priority, - immediately: job.immediately || false - }, job.cronPattern); - - this.logger.info('Scheduled job registered', { - type: job.type, - service, - provider, - operation: job.operation, - cronPattern: job.cronPattern, - description: job.description, - immediately: job.immediately || false - }); - - successCount++; - - } catch (error) { - this.logger.error('Failed to register scheduled job', { - type: job.type, - service, - provider, - error: error instanceof Error ? error.message : String(error) - }); - failureCount++; - } - } - - this.logger.info(`Scheduled tasks setup complete`, { - total: allScheduledJobs.length, - successful: successCount, - failed: failureCount, - updated: updatedCount, - new: newCount - }); - - } catch (error) { - this.logger.error('Failed to setup scheduled tasks', error); - } -} - - async addJob(jobData: JobData, options?: any) { - if (!this.isInitialized) { - throw new Error('Queue service not initialized. 
Call initialize() first.'); - } - return this.queue.add(jobData.type, jobData, { - priority: jobData.priority || 0, - removeOnComplete: 10, - removeOnFail: 5, - ...options - }); - } - - async addRecurringJob(jobData: JobData, cronPattern: string, options?: any) { - if (!this.isInitialized) { - throw new Error('Queue service not initialized. Call initialize() first.'); - } - - try { - // Create a unique job key for this specific job - const jobKey = `${jobData.service}-${jobData.provider}-${jobData.operation}`; - - // Get all existing repeatable jobs - const existingJobs = await this.queue.getRepeatableJobs(); - - // Find and remove the existing job with the same key if it exists - const existingJob = existingJobs.find(job => { - // Check if this is the same job by comparing key components - return job.key?.includes(jobKey) || job.name === jobData.type; - }); - - if (existingJob) { - this.logger.info('Updating existing recurring job', { - jobKey, - existingPattern: existingJob.pattern, - newPattern: cronPattern - }); - - // Remove the existing job - await this.queue.removeRepeatableByKey(existingJob.key); - - // Small delay to ensure cleanup is complete - await new Promise(resolve => setTimeout(resolve, 100)); - } else { - this.logger.info('Creating new recurring job', { jobKey, cronPattern }); - } - - // Add the new/updated recurring job - const job = await this.queue.add(jobData.type, jobData, { - repeat: { - pattern: cronPattern, - tz: 'UTC', - immediately: jobData.immediately || false, - }, - // Use a consistent jobId for this specific recurring job - jobId: `recurring-${jobKey}`, - removeOnComplete: 1, - removeOnFail: 1, - attempts: 2, - backoff: { - type: 'fixed', - delay: 5000 - }, - ...options - }); - - this.logger.info('Recurring job added/updated successfully', { - jobKey, - type: jobData.type, - cronPattern, - immediately: jobData.immediately || false - }); - - return job; - - } catch (error) { - this.logger.error('Failed to add/update recurring job', { - 
jobData, - cronPattern, - error: error instanceof Error ? error.message : String(error) - }); - throw error; - } - } - - async getJobStats() { - if (!this.isInitialized) { - throw new Error('Queue service not initialized. Call initialize() first.'); - } - const [waiting, active, completed, failed, delayed] = await Promise.all([ - this.queue.getWaiting(), - this.queue.getActive(), - this.queue.getCompleted(), - this.queue.getFailed(), - this.queue.getDelayed() - ]); - - return { - waiting: waiting.length, - active: active.length, - completed: completed.length, - failed: failed.length, - delayed: delayed.length - }; - } - - async drainQueue() { - if (!this.isInitialized) { - await this.queue.drain() - } - } - - async getQueueStatus() { - if (!this.isInitialized) { - throw new Error('Queue service not initialized. Call initialize() first.'); - } - const stats = await this.getJobStats(); - return { - ...stats, - workers: this.getWorkerCount(), - totalConcurrency: this.getTotalConcurrency(), - queue: this.queue.name, - connection: { - host: process.env.DRAGONFLY_HOST || 'localhost', - port: parseInt(process.env.DRAGONFLY_PORT || '6379') - } - }; - } - - getWorkerCount() { - if (!this.isInitialized) { - return 0; - } - return this.workers.length; - } - - getRegisteredProviders() { - return providerRegistry.getProviders().map(({ key, config }) => ({ - key, - name: config.name, - service: config.service, - operations: Object.keys(config.operations), - scheduledJobs: config.scheduledJobs?.length || 0 - })); - } - - getScheduledJobsInfo() { - return providerRegistry.getAllScheduledJobs().map(({ service, provider, job }) => ({ - id: `${service}-${provider}-${job.type}`, - service, - provider, - type: job.type, - operation: job.operation, - cronPattern: job.cronPattern, - priority: job.priority, - description: job.description, - immediately: job.immediately || false - })); - } - async shutdown() { - if (!this.isInitialized) { - this.logger.warn('Queue service not initialized, 
nothing to shutdown'); - return; - } - this.logger.info('Shutting down queue service'); - - // Close all workers - this.logger.info(`Closing ${this.workers.length} workers...`); - await Promise.all(this.workers.map((worker, index) => { - this.logger.debug(`Closing worker ${index + 1}`); - return worker.close(); - })); - - await this.queue.close(); - await this.queueEvents.close(); - this.isInitialized = false; - this.logger.info('Queue service shutdown complete'); - } -} - -export const queueManager = new QueueService(); +import { Queue, Worker, QueueEvents } from 'bullmq'; +import { getLogger } from '@stock-bot/logger'; +import { providerRegistry } from './provider-registry.service'; + +export interface JobData { + type: string; + service: string; + provider: string; + operation: string; + payload: any; + priority?: number; + immediately?: boolean; +} + +export class QueueService { + private logger = getLogger('queue-service'); + private queue!: Queue; + private workers: Worker[] = []; + private queueEvents!: QueueEvents; + private isInitialized = false; + + constructor() { + // Don't initialize in constructor to allow for proper async initialization + } + + async initialize() { + if (this.isInitialized) { + this.logger.warn('Queue service already initialized'); + return; + } + + this.logger.info('Initializing queue service...'); + + // Register all providers first + await this.registerProviders(); + + const connection = { + host: process.env.DRAGONFLY_HOST || 'localhost', + port: parseInt(process.env.DRAGONFLY_PORT || '6379'), + // Add these Redis-specific options to fix the undeclared key issue + maxRetriesPerRequest: null, + retryDelayOnFailover: 100, + enableReadyCheck: false, + lazyConnect: true, + // Disable Redis Cluster mode if you're using standalone Redis/Dragonfly + enableOfflineQueue: false + }; + + // Worker configuration + const workerCount = parseInt(process.env.WORKER_COUNT || '5'); + const concurrencyPerWorker = 
parseInt(process.env.WORKER_CONCURRENCY || '20'); + + this.logger.info('Connecting to Redis/Dragonfly', connection); + + try { + this.queue = new Queue('{data-service-queue}', { + connection, + defaultJobOptions: { + removeOnComplete: 10, + removeOnFail: 5, + attempts: 3, + backoff: { + type: 'exponential', + delay: 1000, + } + } + }); + // Create multiple workers + for (let i = 0; i < workerCount; i++) { + const worker = new Worker( + '{data-service-queue}', + this.processJob.bind(this), + { + connection: { ...connection }, // Each worker gets its own connection + concurrency: concurrencyPerWorker, + maxStalledCount: 1, + stalledInterval: 30000, + } + ); + // Add worker-specific logging + worker.on('ready', () => { + this.logger.info(`Worker ${i + 1} ready`, { workerId: i + 1 }); + }); + + worker.on('error', (error) => { + this.logger.error(`Worker ${i + 1} error`, { workerId: i + 1, error }); + }); + + this.workers.push(worker); + } + this.queueEvents = new QueueEvents('{data-service-queue}', { connection }); // Test connection + + // Wait for all workers to be ready + await this.queue.waitUntilReady(); + await Promise.all(this.workers.map(worker => worker.waitUntilReady())); + await this.queueEvents.waitUntilReady(); + + this.setupEventListeners(); + this.isInitialized = true; + this.logger.info('Queue service initialized successfully'); + + await this.setupScheduledTasks(); + + } catch (error) { + this.logger.error('Failed to initialize queue service', { error }); + throw error; + } + } + + // Update getTotalConcurrency method + getTotalConcurrency() { + if (!this.isInitialized) { + return 0; + } + return this.workers.reduce((total, worker) => { + return total + (worker.opts.concurrency || 1); + }, 0); + } + + private async registerProviders() { + this.logger.info('Registering providers...'); + + try { + // Import and register all providers + const { proxyProvider } = await import('../providers/proxy.provider'); + const { quotemediaProvider } = await 
import('../providers/quotemedia.provider'); + const { yahooProvider } = await import('../providers/yahoo.provider'); + + providerRegistry.registerProvider(proxyProvider); + providerRegistry.registerProvider(quotemediaProvider); + providerRegistry.registerProvider(yahooProvider); + + this.logger.info('All providers registered successfully'); + } catch (error) { + this.logger.error('Failed to register providers', { error }); + throw error; + } + } + + private async processJob(job: any) { + const { service, provider, operation, payload }: JobData = job.data; + + this.logger.info('Processing job', { + id: job.id, + service, + provider, + operation, + payloadKeys: Object.keys(payload || {}) + }); + + try { + // Get handler from registry + const handler = providerRegistry.getHandler(service, provider, operation); + + if (!handler) { + throw new Error(`No handler found for ${service}:${provider}:${operation}`); + } + + // Execute the handler + const result = await handler(payload); + + this.logger.info('Job completed successfully', { + id: job.id, + service, + provider, + operation + }); + + return result; + + } catch (error) { + const errorMessage = error instanceof Error ? 
error.message : String(error); + this.logger.error('Job failed', { + id: job.id, + service, + provider, + operation, + error: errorMessage + }); + throw error; + } + } + + async addBulk(jobs: any[]) : Promise { + return await this.queue.addBulk(jobs) + } + private setupEventListeners() { + this.queueEvents.on('completed', (job) => { + this.logger.info('Job completed', { id: job.jobId }); + }); + + this.queueEvents.on('failed', (job) => { + this.logger.error('Job failed', { id: job.jobId, error: job.failedReason }); + }); + + // Note: Worker-specific events are already set up during worker creation + // No need for additional progress events since we handle them per-worker + } + private async setupScheduledTasks() { + try { + this.logger.info('Setting up scheduled tasks from providers...'); + + // Get all scheduled jobs from all providers + const allScheduledJobs = providerRegistry.getAllScheduledJobs(); + + if (allScheduledJobs.length === 0) { + this.logger.warn('No scheduled jobs found in providers'); + return; + } + + // Get existing repeatable jobs for comparison + const existingJobs = await this.queue.getRepeatableJobs(); + this.logger.info(`Found ${existingJobs.length} existing repeatable jobs`); + + let successCount = 0; + let failureCount = 0; + let updatedCount = 0; + let newCount = 0; + + // Process each scheduled job + for (const { service, provider, job } of allScheduledJobs) { + try { + const jobKey = `${service}-${provider}-${job.operation}`; + + // Check if this job already exists + const existingJob = existingJobs.find(existing => + existing.key?.includes(jobKey) || existing.name === job.type + ); + + if (existingJob) { + // Check if the job needs updating (different cron pattern or config) + const needsUpdate = existingJob.pattern !== job.cronPattern; + + if (needsUpdate) { + this.logger.info('Job configuration changed, updating', { + jobKey, + oldPattern: existingJob.pattern, + newPattern: job.cronPattern + }); + updatedCount++; + } else { + 
this.logger.debug('Job unchanged, skipping', { jobKey }); + successCount++; + continue; + } + } else { + newCount++; + } + + // Add delay between job registrations + await new Promise(resolve => setTimeout(resolve, 100)); + + await this.addRecurringJob({ + type: job.type, + service: service, + provider: provider, + operation: job.operation, + payload: job.payload, + priority: job.priority, + immediately: job.immediately || false + }, job.cronPattern); + + this.logger.info('Scheduled job registered', { + type: job.type, + service, + provider, + operation: job.operation, + cronPattern: job.cronPattern, + description: job.description, + immediately: job.immediately || false + }); + + successCount++; + + } catch (error) { + this.logger.error('Failed to register scheduled job', { + type: job.type, + service, + provider, + error: error instanceof Error ? error.message : String(error) + }); + failureCount++; + } + } + + this.logger.info(`Scheduled tasks setup complete`, { + total: allScheduledJobs.length, + successful: successCount, + failed: failureCount, + updated: updatedCount, + new: newCount + }); + + } catch (error) { + this.logger.error('Failed to setup scheduled tasks', error); + } +} + + async addJob(jobData: JobData, options?: any) { + if (!this.isInitialized) { + throw new Error('Queue service not initialized. Call initialize() first.'); + } + return this.queue.add(jobData.type, jobData, { + priority: jobData.priority || 0, + removeOnComplete: 10, + removeOnFail: 5, + ...options + }); + } + + async addRecurringJob(jobData: JobData, cronPattern: string, options?: any) { + if (!this.isInitialized) { + throw new Error('Queue service not initialized. 
Call initialize() first.'); + } + + try { + // Create a unique job key for this specific job + const jobKey = `${jobData.service}-${jobData.provider}-${jobData.operation}`; + + // Get all existing repeatable jobs + const existingJobs = await this.queue.getRepeatableJobs(); + + // Find and remove the existing job with the same key if it exists + const existingJob = existingJobs.find(job => { + // Check if this is the same job by comparing key components + return job.key?.includes(jobKey) || job.name === jobData.type; + }); + + if (existingJob) { + this.logger.info('Updating existing recurring job', { + jobKey, + existingPattern: existingJob.pattern, + newPattern: cronPattern + }); + + // Remove the existing job + await this.queue.removeRepeatableByKey(existingJob.key); + + // Small delay to ensure cleanup is complete + await new Promise(resolve => setTimeout(resolve, 100)); + } else { + this.logger.info('Creating new recurring job', { jobKey, cronPattern }); + } + + // Add the new/updated recurring job + const job = await this.queue.add(jobData.type, jobData, { + repeat: { + pattern: cronPattern, + tz: 'UTC', + immediately: jobData.immediately || false, + }, + // Use a consistent jobId for this specific recurring job + jobId: `recurring-${jobKey}`, + removeOnComplete: 1, + removeOnFail: 1, + attempts: 2, + backoff: { + type: 'fixed', + delay: 5000 + }, + ...options + }); + + this.logger.info('Recurring job added/updated successfully', { + jobKey, + type: jobData.type, + cronPattern, + immediately: jobData.immediately || false + }); + + return job; + + } catch (error) { + this.logger.error('Failed to add/update recurring job', { + jobData, + cronPattern, + error: error instanceof Error ? error.message : String(error) + }); + throw error; + } + } + + async getJobStats() { + if (!this.isInitialized) { + throw new Error('Queue service not initialized. 
Call initialize() first.'); + } + const [waiting, active, completed, failed, delayed] = await Promise.all([ + this.queue.getWaiting(), + this.queue.getActive(), + this.queue.getCompleted(), + this.queue.getFailed(), + this.queue.getDelayed() + ]); + + return { + waiting: waiting.length, + active: active.length, + completed: completed.length, + failed: failed.length, + delayed: delayed.length + }; + } + + async drainQueue() { + if (!this.isInitialized) { + await this.queue.drain() + } + } + + async getQueueStatus() { + if (!this.isInitialized) { + throw new Error('Queue service not initialized. Call initialize() first.'); + } + const stats = await this.getJobStats(); + return { + ...stats, + workers: this.getWorkerCount(), + totalConcurrency: this.getTotalConcurrency(), + queue: this.queue.name, + connection: { + host: process.env.DRAGONFLY_HOST || 'localhost', + port: parseInt(process.env.DRAGONFLY_PORT || '6379') + } + }; + } + + getWorkerCount() { + if (!this.isInitialized) { + return 0; + } + return this.workers.length; + } + + getRegisteredProviders() { + return providerRegistry.getProviders().map(({ key, config }) => ({ + key, + name: config.name, + service: config.service, + operations: Object.keys(config.operations), + scheduledJobs: config.scheduledJobs?.length || 0 + })); + } + + getScheduledJobsInfo() { + return providerRegistry.getAllScheduledJobs().map(({ service, provider, job }) => ({ + id: `${service}-${provider}-${job.type}`, + service, + provider, + type: job.type, + operation: job.operation, + cronPattern: job.cronPattern, + priority: job.priority, + description: job.description, + immediately: job.immediately || false + })); + } + async shutdown() { + if (!this.isInitialized) { + this.logger.warn('Queue service not initialized, nothing to shutdown'); + return; + } + this.logger.info('Shutting down queue service'); + + // Close all workers + this.logger.info(`Closing ${this.workers.length} workers...`); + await 
Promise.all(this.workers.map((worker, index) => { + this.logger.debug(`Closing worker ${index + 1}`); + return worker.close(); + })); + + await this.queue.close(); + await this.queueEvents.close(); + this.isInitialized = false; + this.logger.info('Queue service shutdown complete'); + } +} + +export const queueManager = new QueueService(); diff --git a/apps/data-service/src/utils/batch-processor.ts b/apps/data-service/src/utils/batch-processor.ts index a34ed07..5245c21 100644 --- a/apps/data-service/src/utils/batch-processor.ts +++ b/apps/data-service/src/utils/batch-processor.ts @@ -1,293 +1,293 @@ -import { getLogger } from '@stock-bot/logger'; - -export interface BatchConfig { - items: T[]; - batchSize?: number; // Optional - only used for batch mode - totalDelayMs: number; - jobNamePrefix: string; - operation: string; - service: string; - provider: string; - priority?: number; - createJobData: (item: T, index: number) => any; - removeOnComplete?: number; - removeOnFail?: number; - useBatching?: boolean; // Simple flag to choose mode -} - -const logger = getLogger('batch-processor'); - -export class BatchProcessor { - constructor(private queueManager: any) {} - - /** - * Unified method that handles both direct and batch approaches - */ - async processItems(config: BatchConfig) { - const { items, useBatching = false } = config; - - if (items.length === 0) { - return { totalItems: 0, jobsCreated: 0 }; - } - - if (useBatching) { - return await this.createBatchJobs(config); - } else { - return await this.createDirectJobs(config); - } - } - - private async createDirectJobs(config: BatchConfig) { - const { - items, - totalDelayMs, - jobNamePrefix, - operation, - service, - provider, - priority = 2, - createJobData, - removeOnComplete = 5, - removeOnFail = 3 - } = config; - - const delayPerItem = Math.floor(totalDelayMs / items.length); - const chunkSize = 100; - let totalJobsCreated = 0; - - logger.info('Creating direct jobs', { - totalItems: items.length, - 
delayPerItem: `${(delayPerItem / 1000).toFixed(1)}s`, - estimatedDuration: `${(totalDelayMs / 1000 / 60 / 60).toFixed(1)} hours` - }); - - // Process in chunks to avoid overwhelming Redis - for (let i = 0; i < items.length; i += chunkSize) { - const chunk = items.slice(i, i + chunkSize); - - const jobs = chunk.map((item, chunkIndex) => { - const globalIndex = i + chunkIndex; - return { - name: `${jobNamePrefix}-processing`, - data: { - type: `${jobNamePrefix}-processing`, - service, - provider, - operation, - payload: createJobData(item, globalIndex), - priority - }, - opts: { - delay: globalIndex * delayPerItem, - jobId: `${jobNamePrefix}-${globalIndex}-${Date.now()}`, - removeOnComplete, - removeOnFail - } - }; - }); - - try { - const createdJobs = await this.queueManager.queue.addBulk(jobs); - totalJobsCreated += createdJobs.length; - - // Log progress every 500 jobs - if (totalJobsCreated % 500 === 0 || i + chunkSize >= items.length) { - logger.info('Direct job creation progress', { - created: totalJobsCreated, - total: items.length, - percentage: `${((totalJobsCreated / items.length) * 100).toFixed(1)}%` - }); - } - } catch (error) { - logger.error('Failed to create job chunk', { - startIndex: i, - chunkSize: chunk.length, - error: error instanceof Error ? 
error.message : String(error) - }); - } - } - - return { - totalItems: items.length, - jobsCreated: totalJobsCreated, - mode: 'direct' - }; - } - - private async createBatchJobs(config: BatchConfig) { - const { - items, - batchSize = 200, - totalDelayMs, - jobNamePrefix, - operation, - service, - provider, - priority = 3 - } = config; - - const totalBatches = Math.ceil(items.length / batchSize); - const delayPerBatch = Math.floor(totalDelayMs / totalBatches); - const chunkSize = 50; // Create batch jobs in chunks - let batchJobsCreated = 0; - - logger.info('Creating batch jobs', { - totalItems: items.length, - batchSize, - totalBatches, - delayPerBatch: `${(delayPerBatch / 1000 / 60).toFixed(2)} minutes` - }); - - // Create batch jobs in chunks - for (let chunkStart = 0; chunkStart < totalBatches; chunkStart += chunkSize) { - const chunkEnd = Math.min(chunkStart + chunkSize, totalBatches); - const batchJobs = []; - - for (let batchIndex = chunkStart; batchIndex < chunkEnd; batchIndex++) { - const startIndex = batchIndex * batchSize; - const endIndex = Math.min(startIndex + batchSize, items.length); - const batchItems = items.slice(startIndex, endIndex); - - batchJobs.push({ - name: `${jobNamePrefix}-batch-processing`, - data: { - type: `${jobNamePrefix}-batch-processing`, - service, - provider, - operation: `process-${jobNamePrefix}-batch`, - payload: { - items: batchItems, - batchIndex, - total: totalBatches, - config: { ...config, priority: priority - 1 } - }, - priority - }, - opts: { - delay: batchIndex * delayPerBatch, - jobId: `${jobNamePrefix}-batch-${batchIndex}-${Date.now()}` - } - }); - } - - try { - const createdJobs = await this.queueManager.queue.addBulk(batchJobs); - batchJobsCreated += createdJobs.length; - - logger.info('Batch chunk created', { - chunkStart: chunkStart + 1, - chunkEnd, - created: createdJobs.length, - totalCreated: batchJobsCreated, - progress: `${((chunkEnd / totalBatches) * 100).toFixed(1)}%` - }); - } catch (error) { - 
logger.error('Failed to create batch chunk', { - chunkStart, - chunkEnd, - error: error instanceof Error ? error.message : String(error) - }); - } - - // Small delay between chunks - if (chunkEnd < totalBatches) { - await new Promise(resolve => setTimeout(resolve, 100)); - } - } - - return { - totalItems: items.length, - batchJobsCreated, - totalBatches, - estimatedDurationHours: totalDelayMs / 1000 / 60 / 60, - mode: 'batch' - }; - } - - /** - * Process a batch (called by batch jobs) - */ - async processBatch(payload: { - items: T[]; - batchIndex: number; - total: number; - config: BatchConfig; - }, createJobData?: (item: T, index: number) => any) { - const { items, batchIndex, total, config } = payload; - - logger.info('Processing batch', { - batchIndex, - batchSize: items.length, - total, - progress: `${((batchIndex + 1) / total * 100).toFixed(2)}%` - }); - - const totalBatchDelayMs = config.totalDelayMs / total; - const delayPerItem = Math.floor(totalBatchDelayMs / items.length); - - const jobs = items.map((item, itemIndex) => { - // Use the provided createJobData function or fall back to config - const jobDataFn = createJobData || config.createJobData; - - if (!jobDataFn) { - throw new Error('createJobData function is required'); - } - - const userData = jobDataFn(item, itemIndex); - - return { - name: `${config.jobNamePrefix}-processing`, - data: { - type: `${config.jobNamePrefix}-processing`, - service: config.service, - provider: config.provider, - operation: config.operation, - payload: { - ...userData, - batchIndex, - itemIndex, - total, - source: userData.source || 'batch-processing' - }, - priority: config.priority || 2 - }, - opts: { - delay: itemIndex * delayPerItem, - jobId: `${config.jobNamePrefix}-${batchIndex}-${itemIndex}-${Date.now()}`, - removeOnComplete: config.removeOnComplete || 5, - removeOnFail: config.removeOnFail || 3 - } - }; - }); - - try { - const createdJobs = await this.queueManager.queue.addBulk(jobs); - - logger.info('Batch 
processing completed', { - batchIndex, - totalItems: items.length, - jobsCreated: createdJobs.length, - progress: `${((batchIndex + 1) / total * 100).toFixed(2)}%` - }); - - return { - batchIndex, - totalItems: items.length, - jobsCreated: createdJobs.length, - jobsFailed: 0 - }; - } catch (error) { - logger.error('Failed to process batch', { - batchIndex, - error: error instanceof Error ? error.message : String(error) - }); - - return { - batchIndex, - totalItems: items.length, - jobsCreated: 0, - jobsFailed: items.length - }; - } - } +import { getLogger } from '@stock-bot/logger'; + +export interface BatchConfig { + items: T[]; + batchSize?: number; // Optional - only used for batch mode + totalDelayMs: number; + jobNamePrefix: string; + operation: string; + service: string; + provider: string; + priority?: number; + createJobData: (item: T, index: number) => any; + removeOnComplete?: number; + removeOnFail?: number; + useBatching?: boolean; // Simple flag to choose mode +} + +const logger = getLogger('batch-processor'); + +export class BatchProcessor { + constructor(private queueManager: any) {} + + /** + * Unified method that handles both direct and batch approaches + */ + async processItems(config: BatchConfig) { + const { items, useBatching = false } = config; + + if (items.length === 0) { + return { totalItems: 0, jobsCreated: 0 }; + } + + if (useBatching) { + return await this.createBatchJobs(config); + } else { + return await this.createDirectJobs(config); + } + } + + private async createDirectJobs(config: BatchConfig) { + const { + items, + totalDelayMs, + jobNamePrefix, + operation, + service, + provider, + priority = 2, + createJobData, + removeOnComplete = 5, + removeOnFail = 3 + } = config; + + const delayPerItem = Math.floor(totalDelayMs / items.length); + const chunkSize = 100; + let totalJobsCreated = 0; + + logger.info('Creating direct jobs', { + totalItems: items.length, + delayPerItem: `${(delayPerItem / 1000).toFixed(1)}s`, + estimatedDuration: 
`${(totalDelayMs / 1000 / 60 / 60).toFixed(1)} hours` + }); + + // Process in chunks to avoid overwhelming Redis + for (let i = 0; i < items.length; i += chunkSize) { + const chunk = items.slice(i, i + chunkSize); + + const jobs = chunk.map((item, chunkIndex) => { + const globalIndex = i + chunkIndex; + return { + name: `${jobNamePrefix}-processing`, + data: { + type: `${jobNamePrefix}-processing`, + service, + provider, + operation, + payload: createJobData(item, globalIndex), + priority + }, + opts: { + delay: globalIndex * delayPerItem, + jobId: `${jobNamePrefix}-${globalIndex}-${Date.now()}`, + removeOnComplete, + removeOnFail + } + }; + }); + + try { + const createdJobs = await this.queueManager.queue.addBulk(jobs); + totalJobsCreated += createdJobs.length; + + // Log progress every 500 jobs + if (totalJobsCreated % 500 === 0 || i + chunkSize >= items.length) { + logger.info('Direct job creation progress', { + created: totalJobsCreated, + total: items.length, + percentage: `${((totalJobsCreated / items.length) * 100).toFixed(1)}%` + }); + } + } catch (error) { + logger.error('Failed to create job chunk', { + startIndex: i, + chunkSize: chunk.length, + error: error instanceof Error ? 
error.message : String(error) + }); + } + } + + return { + totalItems: items.length, + jobsCreated: totalJobsCreated, + mode: 'direct' + }; + } + + private async createBatchJobs(config: BatchConfig) { + const { + items, + batchSize = 200, + totalDelayMs, + jobNamePrefix, + operation, + service, + provider, + priority = 3 + } = config; + + const totalBatches = Math.ceil(items.length / batchSize); + const delayPerBatch = Math.floor(totalDelayMs / totalBatches); + const chunkSize = 50; // Create batch jobs in chunks + let batchJobsCreated = 0; + + logger.info('Creating batch jobs', { + totalItems: items.length, + batchSize, + totalBatches, + delayPerBatch: `${(delayPerBatch / 1000 / 60).toFixed(2)} minutes` + }); + + // Create batch jobs in chunks + for (let chunkStart = 0; chunkStart < totalBatches; chunkStart += chunkSize) { + const chunkEnd = Math.min(chunkStart + chunkSize, totalBatches); + const batchJobs = []; + + for (let batchIndex = chunkStart; batchIndex < chunkEnd; batchIndex++) { + const startIndex = batchIndex * batchSize; + const endIndex = Math.min(startIndex + batchSize, items.length); + const batchItems = items.slice(startIndex, endIndex); + + batchJobs.push({ + name: `${jobNamePrefix}-batch-processing`, + data: { + type: `${jobNamePrefix}-batch-processing`, + service, + provider, + operation: `process-${jobNamePrefix}-batch`, + payload: { + items: batchItems, + batchIndex, + total: totalBatches, + config: { ...config, priority: priority - 1 } + }, + priority + }, + opts: { + delay: batchIndex * delayPerBatch, + jobId: `${jobNamePrefix}-batch-${batchIndex}-${Date.now()}` + } + }); + } + + try { + const createdJobs = await this.queueManager.queue.addBulk(batchJobs); + batchJobsCreated += createdJobs.length; + + logger.info('Batch chunk created', { + chunkStart: chunkStart + 1, + chunkEnd, + created: createdJobs.length, + totalCreated: batchJobsCreated, + progress: `${((chunkEnd / totalBatches) * 100).toFixed(1)}%` + }); + } catch (error) { + 
logger.error('Failed to create batch chunk', { + chunkStart, + chunkEnd, + error: error instanceof Error ? error.message : String(error) + }); + } + + // Small delay between chunks + if (chunkEnd < totalBatches) { + await new Promise(resolve => setTimeout(resolve, 100)); + } + } + + return { + totalItems: items.length, + batchJobsCreated, + totalBatches, + estimatedDurationHours: totalDelayMs / 1000 / 60 / 60, + mode: 'batch' + }; + } + + /** + * Process a batch (called by batch jobs) + */ + async processBatch(payload: { + items: T[]; + batchIndex: number; + total: number; + config: BatchConfig; + }, createJobData?: (item: T, index: number) => any) { + const { items, batchIndex, total, config } = payload; + + logger.info('Processing batch', { + batchIndex, + batchSize: items.length, + total, + progress: `${((batchIndex + 1) / total * 100).toFixed(2)}%` + }); + + const totalBatchDelayMs = config.totalDelayMs / total; + const delayPerItem = Math.floor(totalBatchDelayMs / items.length); + + const jobs = items.map((item, itemIndex) => { + // Use the provided createJobData function or fall back to config + const jobDataFn = createJobData || config.createJobData; + + if (!jobDataFn) { + throw new Error('createJobData function is required'); + } + + const userData = jobDataFn(item, itemIndex); + + return { + name: `${config.jobNamePrefix}-processing`, + data: { + type: `${config.jobNamePrefix}-processing`, + service: config.service, + provider: config.provider, + operation: config.operation, + payload: { + ...userData, + batchIndex, + itemIndex, + total, + source: userData.source || 'batch-processing' + }, + priority: config.priority || 2 + }, + opts: { + delay: itemIndex * delayPerItem, + jobId: `${config.jobNamePrefix}-${batchIndex}-${itemIndex}-${Date.now()}`, + removeOnComplete: config.removeOnComplete || 5, + removeOnFail: config.removeOnFail || 3 + } + }; + }); + + try { + const createdJobs = await this.queueManager.queue.addBulk(jobs); + + logger.info('Batch 
processing completed', { + batchIndex, + totalItems: items.length, + jobsCreated: createdJobs.length, + progress: `${((batchIndex + 1) / total * 100).toFixed(2)}%` + }); + + return { + batchIndex, + totalItems: items.length, + jobsCreated: createdJobs.length, + jobsFailed: 0 + }; + } catch (error) { + logger.error('Failed to process batch', { + batchIndex, + error: error instanceof Error ? error.message : String(error) + }); + + return { + batchIndex, + totalItems: items.length, + jobsCreated: 0, + jobsFailed: items.length + }; + } + } } \ No newline at end of file diff --git a/apps/data-service/tsconfig.json b/apps/data-service/tsconfig.json index f628a18..7cf025e 100644 --- a/apps/data-service/tsconfig.json +++ b/apps/data-service/tsconfig.json @@ -1,20 +1,20 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**", "**/tests/**", "**/__tests__/**"], - "references": [ - { "path": "../../libs/types" }, - { "path": "../../libs/config" }, - { "path": "../../libs/logger" }, - { "path": "../../libs/http" }, - { "path": "../../libs/cache" }, - { "path": "../../libs/questdb-client" }, - { "path": "../../libs/mongodb-client" }, - { "path": "../../libs/event-bus" }, - { "path": "../../libs/shutdown" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**", "**/tests/**", "**/__tests__/**"], + "references": [ + { "path": "../../libs/types" }, + { "path": "../../libs/config" }, + { "path": "../../libs/logger" }, + { "path": "../../libs/http" }, + { "path": "../../libs/cache" }, + { "path": "../../libs/questdb-client" }, + { "path": "../../libs/mongodb-client" }, + { "path": "../../libs/event-bus" }, + { "path": "../../libs/shutdown" } + ] +} 
diff --git a/apps/data-service/turbo.json b/apps/data-service/turbo.json index a050a5e..374c623 100644 --- a/apps/data-service/turbo.json +++ b/apps/data-service/turbo.json @@ -1,19 +1,19 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": [ - "@stock-bot/cache#build", - "@stock-bot/config#build", - "@stock-bot/event-bus#build", - "@stock-bot/http#build", - "@stock-bot/logger#build", - "@stock-bot/mongodb-client#build", - "@stock-bot/questdb-client#build", - "@stock-bot/shutdown#build" - ], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": [ + "@stock-bot/cache#build", + "@stock-bot/config#build", + "@stock-bot/event-bus#build", + "@stock-bot/http#build", + "@stock-bot/logger#build", + "@stock-bot/mongodb-client#build", + "@stock-bot/questdb-client#build", + "@stock-bot/shutdown#build" + ], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/apps/execution-service/package.json b/apps/execution-service/package.json index efdf413..1a4e5f8 100644 --- a/apps/execution-service/package.json +++ b/apps/execution-service/package.json @@ -1,37 +1,37 @@ -{ - "name": "@stock-bot/execution-service", - "version": "1.0.0", - "description": "Execution service for stock trading bot - handles order execution and broker integration", - "main": "dist/index.js", - "type": "module", - "scripts": { - "build": "tsc", - "devvvvv": "bun --watch src/index.ts", - "start": "bun src/index.ts", - "test": "bun test", - "lint": "eslint src --ext .ts", - "type-check": "tsc --noEmit" - }, - "dependencies": { - "@hono/node-server": "^1.12.0", - "hono": "^4.6.1", - "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - 
"@stock-bot/event-bus": "*", - "@stock-bot/utils": "*" - }, - "devDependencies": { - "@types/node": "^22.5.0", - "typescript": "^5.5.4" - }, - "keywords": [ - "trading", - "execution", - "broker", - "orders", - "stock-bot" - ], - "author": "Stock Bot Team", - "license": "MIT" -} +{ + "name": "@stock-bot/execution-service", + "version": "1.0.0", + "description": "Execution service for stock trading bot - handles order execution and broker integration", + "main": "dist/index.js", + "type": "module", + "scripts": { + "build": "tsc", + "devvvvv": "bun --watch src/index.ts", + "start": "bun src/index.ts", + "test": "bun test", + "lint": "eslint src --ext .ts", + "type-check": "tsc --noEmit" + }, + "dependencies": { + "@hono/node-server": "^1.12.0", + "hono": "^4.6.1", + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "@stock-bot/event-bus": "*", + "@stock-bot/utils": "*" + }, + "devDependencies": { + "@types/node": "^22.5.0", + "typescript": "^5.5.4" + }, + "keywords": [ + "trading", + "execution", + "broker", + "orders", + "stock-bot" + ], + "author": "Stock Bot Team", + "license": "MIT" +} diff --git a/apps/execution-service/src/broker/interface.ts b/apps/execution-service/src/broker/interface.ts index 790b794..f81b1c8 100644 --- a/apps/execution-service/src/broker/interface.ts +++ b/apps/execution-service/src/broker/interface.ts @@ -1,94 +1,94 @@ -import { Order, OrderResult, OrderStatus } from '@stock-bot/types'; - -export interface BrokerInterface { - /** - * Execute an order with the broker - */ - executeOrder(order: Order): Promise; - - /** - * Get order status from broker - */ - getOrderStatus(orderId: string): Promise; - - /** - * Cancel an order - */ - cancelOrder(orderId: string): Promise; - - /** - * Get current positions - */ - getPositions(): Promise; - - /** - * Get account balance - */ - getAccountBalance(): Promise; -} - -export interface Position { - symbol: string; - quantity: number; - averagePrice: number; - 
currentPrice: number; - unrealizedPnL: number; - side: 'long' | 'short'; -} - -export interface AccountBalance { - totalValue: number; - availableCash: number; - buyingPower: number; - marginUsed: number; -} - -export class MockBroker implements BrokerInterface { - private orders: Map = new Map(); - private positions: Position[] = []; - - async executeOrder(order: Order): Promise { - const orderId = `mock_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; - - const result: OrderResult = { - orderId, - symbol: order.symbol, - quantity: order.quantity, - side: order.side, - status: 'filled', - executedPrice: order.price || 100, // Mock price - executedAt: new Date(), - commission: 1.0 - }; - - this.orders.set(orderId, result); - return result; - } - - async getOrderStatus(orderId: string): Promise { - const order = this.orders.get(orderId); - return order?.status || 'unknown'; - } - - async cancelOrder(orderId: string): Promise { - const order = this.orders.get(orderId); - if (order && order.status === 'pending') { - order.status = 'cancelled'; - return true; - } - return false; - } - - async getPositions(): Promise { - return this.positions; - } - - async getAccountBalance(): Promise { - return { - totalValue: 100000, - availableCash: 50000, - buyingPower: 200000, - marginUsed: 0 - }; - } -} +import { Order, OrderResult, OrderStatus } from '@stock-bot/types'; + +export interface BrokerInterface { + /** + * Execute an order with the broker + */ + executeOrder(order: Order): Promise; + + /** + * Get order status from broker + */ + getOrderStatus(orderId: string): Promise; + + /** + * Cancel an order + */ + cancelOrder(orderId: string): Promise; + + /** + * Get current positions + */ + getPositions(): Promise; + + /** + * Get account balance + */ + getAccountBalance(): Promise; +} + +export interface Position { + symbol: string; + quantity: number; + averagePrice: number; + currentPrice: number; + unrealizedPnL: number; + side: 'long' | 'short'; +} + +export 
interface AccountBalance { + totalValue: number; + availableCash: number; + buyingPower: number; + marginUsed: number; +} + +export class MockBroker implements BrokerInterface { + private orders: Map = new Map(); + private positions: Position[] = []; + + async executeOrder(order: Order): Promise { + const orderId = `mock_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; + + const result: OrderResult = { + orderId, + symbol: order.symbol, + quantity: order.quantity, + side: order.side, + status: 'filled', + executedPrice: order.price || 100, // Mock price + executedAt: new Date(), + commission: 1.0 + }; + + this.orders.set(orderId, result); + return result; + } + + async getOrderStatus(orderId: string): Promise { + const order = this.orders.get(orderId); + return order?.status || 'unknown'; + } + + async cancelOrder(orderId: string): Promise { + const order = this.orders.get(orderId); + if (order && order.status === 'pending') { + order.status = 'cancelled'; + return true; + } + return false; + } + + async getPositions(): Promise { + return this.positions; + } + + async getAccountBalance(): Promise { + return { + totalValue: 100000, + availableCash: 50000, + buyingPower: 200000, + marginUsed: 0 + }; + } +} diff --git a/apps/execution-service/src/execution/order-manager.ts b/apps/execution-service/src/execution/order-manager.ts index 4e6d006..f7ec57d 100644 --- a/apps/execution-service/src/execution/order-manager.ts +++ b/apps/execution-service/src/execution/order-manager.ts @@ -1,57 +1,57 @@ -import { Order, OrderResult } from '@stock-bot/types'; -import { logger } from '@stock-bot/logger'; -import { BrokerInterface } from '../broker/interface.ts'; - -export class OrderManager { - private broker: BrokerInterface; - private pendingOrders: Map = new Map(); - - constructor(broker: BrokerInterface) { - this.broker = broker; - } - - async executeOrder(order: Order): Promise { - try { - logger.info(`Executing order: ${order.symbol} ${order.side} ${order.quantity} 
@ ${order.price}`); - - // Add to pending orders - const orderId = `order_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; - this.pendingOrders.set(orderId, order); - - // Execute with broker - const result = await this.broker.executeOrder(order); - - // Remove from pending - this.pendingOrders.delete(orderId); - - logger.info(`Order executed successfully: ${result.orderId}`); - return result; - - } catch (error) { - logger.error('Order execution failed', error); - throw error; - } - } - - async cancelOrder(orderId: string): Promise { - try { - const success = await this.broker.cancelOrder(orderId); - if (success) { - this.pendingOrders.delete(orderId); - logger.info(`Order cancelled: ${orderId}`); - } - return success; - } catch (error) { - logger.error('Order cancellation failed', error); - throw error; - } - } - - async getOrderStatus(orderId: string) { - return await this.broker.getOrderStatus(orderId); - } - - getPendingOrders(): Order[] { - return Array.from(this.pendingOrders.values()); - } -} +import { Order, OrderResult } from '@stock-bot/types'; +import { logger } from '@stock-bot/logger'; +import { BrokerInterface } from '../broker/interface.ts'; + +export class OrderManager { + private broker: BrokerInterface; + private pendingOrders: Map = new Map(); + + constructor(broker: BrokerInterface) { + this.broker = broker; + } + + async executeOrder(order: Order): Promise { + try { + logger.info(`Executing order: ${order.symbol} ${order.side} ${order.quantity} @ ${order.price}`); + + // Add to pending orders + const orderId = `order_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; + this.pendingOrders.set(orderId, order); + + // Execute with broker + const result = await this.broker.executeOrder(order); + + // Remove from pending + this.pendingOrders.delete(orderId); + + logger.info(`Order executed successfully: ${result.orderId}`); + return result; + + } catch (error) { + logger.error('Order execution failed', error); + throw error; + } + 
} + + async cancelOrder(orderId: string): Promise { + try { + const success = await this.broker.cancelOrder(orderId); + if (success) { + this.pendingOrders.delete(orderId); + logger.info(`Order cancelled: ${orderId}`); + } + return success; + } catch (error) { + logger.error('Order cancellation failed', error); + throw error; + } + } + + async getOrderStatus(orderId: string) { + return await this.broker.getOrderStatus(orderId); + } + + getPendingOrders(): Order[] { + return Array.from(this.pendingOrders.values()); + } +} diff --git a/apps/execution-service/src/execution/risk-manager.ts b/apps/execution-service/src/execution/risk-manager.ts index 6b34bc9..6dd978e 100644 --- a/apps/execution-service/src/execution/risk-manager.ts +++ b/apps/execution-service/src/execution/risk-manager.ts @@ -1,111 +1,111 @@ -import { Order } from '@stock-bot/types'; -import { getLogger } from '@stock-bot/logger'; - -export interface RiskRule { - name: string; - validate(order: Order, context: RiskContext): Promise; -} - -export interface RiskContext { - currentPositions: Map; - accountBalance: number; - totalExposure: number; - maxPositionSize: number; - maxDailyLoss: number; -} - -export interface RiskValidationResult { - isValid: boolean; - reason?: string; - severity: 'info' | 'warning' | 'error'; -} - -export class RiskManager { - private logger = getLogger('risk-manager'); - private rules: RiskRule[] = []; - - constructor() { - this.initializeDefaultRules(); - } - - addRule(rule: RiskRule): void { - this.rules.push(rule); - } - - async validateOrder(order: Order, context: RiskContext): Promise { - for (const rule of this.rules) { - const result = await rule.validate(order, context); - if (!result.isValid) { - logger.warn(`Risk rule violation: ${rule.name}`, { - order, - reason: result.reason - }); - return result; - } - } - - return { isValid: true, severity: 'info' }; - } - - private initializeDefaultRules(): void { - // Position size rule - this.addRule({ - name: 
'MaxPositionSize', - async validate(order: Order, context: RiskContext): Promise { - const orderValue = order.quantity * (order.price || 0); - - if (orderValue > context.maxPositionSize) { - return { - isValid: false, - reason: `Order size ${orderValue} exceeds maximum position size ${context.maxPositionSize}`, - severity: 'error' - }; - } - - return { isValid: true, severity: 'info' }; - } - }); - - // Balance check rule - this.addRule({ - name: 'SufficientBalance', - async validate(order: Order, context: RiskContext): Promise { - const orderValue = order.quantity * (order.price || 0); - - if (order.side === 'buy' && orderValue > context.accountBalance) { - return { - isValid: false, - reason: `Insufficient balance: need ${orderValue}, have ${context.accountBalance}`, - severity: 'error' - }; - } - - return { isValid: true, severity: 'info' }; - } - }); - - // Concentration risk rule - this.addRule({ - name: 'ConcentrationLimit', - async validate(order: Order, context: RiskContext): Promise { - const currentPosition = context.currentPositions.get(order.symbol) || 0; - const newPosition = order.side === 'buy' ? 
- currentPosition + order.quantity : - currentPosition - order.quantity; - - const positionValue = Math.abs(newPosition) * (order.price || 0); - const concentrationRatio = positionValue / context.accountBalance; - - if (concentrationRatio > 0.25) { // 25% max concentration - return { - isValid: false, - reason: `Position concentration ${(concentrationRatio * 100).toFixed(2)}% exceeds 25% limit`, - severity: 'warning' - }; - } - - return { isValid: true, severity: 'info' }; - } - }); - } -} +import { Order } from '@stock-bot/types'; +import { getLogger } from '@stock-bot/logger'; + +export interface RiskRule { + name: string; + validate(order: Order, context: RiskContext): Promise; +} + +export interface RiskContext { + currentPositions: Map; + accountBalance: number; + totalExposure: number; + maxPositionSize: number; + maxDailyLoss: number; +} + +export interface RiskValidationResult { + isValid: boolean; + reason?: string; + severity: 'info' | 'warning' | 'error'; +} + +export class RiskManager { + private logger = getLogger('risk-manager'); + private rules: RiskRule[] = []; + + constructor() { + this.initializeDefaultRules(); + } + + addRule(rule: RiskRule): void { + this.rules.push(rule); + } + + async validateOrder(order: Order, context: RiskContext): Promise { + for (const rule of this.rules) { + const result = await rule.validate(order, context); + if (!result.isValid) { + logger.warn(`Risk rule violation: ${rule.name}`, { + order, + reason: result.reason + }); + return result; + } + } + + return { isValid: true, severity: 'info' }; + } + + private initializeDefaultRules(): void { + // Position size rule + this.addRule({ + name: 'MaxPositionSize', + async validate(order: Order, context: RiskContext): Promise { + const orderValue = order.quantity * (order.price || 0); + + if (orderValue > context.maxPositionSize) { + return { + isValid: false, + reason: `Order size ${orderValue} exceeds maximum position size ${context.maxPositionSize}`, + severity: 'error' + 
}; + } + + return { isValid: true, severity: 'info' }; + } + }); + + // Balance check rule + this.addRule({ + name: 'SufficientBalance', + async validate(order: Order, context: RiskContext): Promise { + const orderValue = order.quantity * (order.price || 0); + + if (order.side === 'buy' && orderValue > context.accountBalance) { + return { + isValid: false, + reason: `Insufficient balance: need ${orderValue}, have ${context.accountBalance}`, + severity: 'error' + }; + } + + return { isValid: true, severity: 'info' }; + } + }); + + // Concentration risk rule + this.addRule({ + name: 'ConcentrationLimit', + async validate(order: Order, context: RiskContext): Promise { + const currentPosition = context.currentPositions.get(order.symbol) || 0; + const newPosition = order.side === 'buy' ? + currentPosition + order.quantity : + currentPosition - order.quantity; + + const positionValue = Math.abs(newPosition) * (order.price || 0); + const concentrationRatio = positionValue / context.accountBalance; + + if (concentrationRatio > 0.25) { // 25% max concentration + return { + isValid: false, + reason: `Position concentration ${(concentrationRatio * 100).toFixed(2)}% exceeds 25% limit`, + severity: 'warning' + }; + } + + return { isValid: true, severity: 'info' }; + } + }); + } +} diff --git a/apps/execution-service/src/index.ts b/apps/execution-service/src/index.ts index d23712a..be35b34 100644 --- a/apps/execution-service/src/index.ts +++ b/apps/execution-service/src/index.ts @@ -1,97 +1,97 @@ -import { Hono } from 'hono'; -import { serve } from '@hono/node-server'; -import { getLogger } from '@stock-bot/logger'; -import { config } from '@stock-bot/config'; -// import { BrokerInterface } from './broker/interface.ts'; -// import { OrderManager } from './execution/order-manager.ts'; -// import { RiskManager } from './execution/risk-manager.ts'; - -const app = new Hono(); -const logger = getLogger('execution-service'); -// Health check endpoint -app.get('/health', (c) => { - 
return c.json({ - status: 'healthy', - service: 'execution-service', - timestamp: new Date().toISOString() - }); -}); - -// Order execution endpoints -app.post('/orders/execute', async (c) => { - try { - const orderRequest = await c.req.json(); - logger.info('Received order execution request', orderRequest); - - // TODO: Validate order and execute - return c.json({ - orderId: `order_${Date.now()}`, - status: 'pending', - message: 'Order submitted for execution' - }); - } catch (error) { - logger.error('Order execution failed', error); - return c.json({ error: 'Order execution failed' }, 500); - } -}); - -app.get('/orders/:orderId/status', async (c) => { - const orderId = c.req.param('orderId'); - - try { - // TODO: Get order status from broker - return c.json({ - orderId, - status: 'filled', - executedAt: new Date().toISOString() - }); - } catch (error) { - logger.error('Failed to get order status', error); - return c.json({ error: 'Failed to get order status' }, 500); - } -}); - -app.post('/orders/:orderId/cancel', async (c) => { - const orderId = c.req.param('orderId'); - - try { - // TODO: Cancel order with broker - return c.json({ - orderId, - status: 'cancelled', - cancelledAt: new Date().toISOString() - }); - } catch (error) { - logger.error('Failed to cancel order', error); - return c.json({ error: 'Failed to cancel order' }, 500); - } -}); - -// Risk management endpoints -app.get('/risk/position/:symbol', async (c) => { - const symbol = c.req.param('symbol'); - - try { - // TODO: Get position risk metrics - return c.json({ - symbol, - position: 100, - exposure: 10000, - risk: 'low' - }); - } catch (error) { - logger.error('Failed to get position risk', error); - return c.json({ error: 'Failed to get position risk' }, 500); - } -}); - -const port = config.EXECUTION_SERVICE_PORT || 3004; - -logger.info(`Starting execution service on port ${port}`); - -serve({ - fetch: app.fetch, - port -}, (info) => { - logger.info(`Execution service is running on port 
${info.port}`); -}); +import { Hono } from 'hono'; +import { serve } from '@hono/node-server'; +import { getLogger } from '@stock-bot/logger'; +import { config } from '@stock-bot/config'; +// import { BrokerInterface } from './broker/interface.ts'; +// import { OrderManager } from './execution/order-manager.ts'; +// import { RiskManager } from './execution/risk-manager.ts'; + +const app = new Hono(); +const logger = getLogger('execution-service'); +// Health check endpoint +app.get('/health', (c) => { + return c.json({ + status: 'healthy', + service: 'execution-service', + timestamp: new Date().toISOString() + }); +}); + +// Order execution endpoints +app.post('/orders/execute', async (c) => { + try { + const orderRequest = await c.req.json(); + logger.info('Received order execution request', orderRequest); + + // TODO: Validate order and execute + return c.json({ + orderId: `order_${Date.now()}`, + status: 'pending', + message: 'Order submitted for execution' + }); + } catch (error) { + logger.error('Order execution failed', error); + return c.json({ error: 'Order execution failed' }, 500); + } +}); + +app.get('/orders/:orderId/status', async (c) => { + const orderId = c.req.param('orderId'); + + try { + // TODO: Get order status from broker + return c.json({ + orderId, + status: 'filled', + executedAt: new Date().toISOString() + }); + } catch (error) { + logger.error('Failed to get order status', error); + return c.json({ error: 'Failed to get order status' }, 500); + } +}); + +app.post('/orders/:orderId/cancel', async (c) => { + const orderId = c.req.param('orderId'); + + try { + // TODO: Cancel order with broker + return c.json({ + orderId, + status: 'cancelled', + cancelledAt: new Date().toISOString() + }); + } catch (error) { + logger.error('Failed to cancel order', error); + return c.json({ error: 'Failed to cancel order' }, 500); + } +}); + +// Risk management endpoints +app.get('/risk/position/:symbol', async (c) => { + const symbol = 
c.req.param('symbol'); + + try { + // TODO: Get position risk metrics + return c.json({ + symbol, + position: 100, + exposure: 10000, + risk: 'low' + }); + } catch (error) { + logger.error('Failed to get position risk', error); + return c.json({ error: 'Failed to get position risk' }, 500); + } +}); + +const port = config.EXECUTION_SERVICE_PORT || 3004; + +logger.info(`Starting execution service on port ${port}`); + +serve({ + fetch: app.fetch, + port +}, (info) => { + logger.info(`Execution service is running on port ${info.port}`); +}); diff --git a/apps/execution-service/tsconfig.json b/apps/execution-service/tsconfig.json index 2e8476d..5aafdff 100644 --- a/apps/execution-service/tsconfig.json +++ b/apps/execution-service/tsconfig.json @@ -1,17 +1,17 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**", "**/tests/**", "**/__tests__/**"], - "references": [ - { "path": "../../libs/types" }, - { "path": "../../libs/config" }, - { "path": "../../libs/logger" }, - { "path": "../../libs/utils" }, - { "path": "../../libs/event-bus" }, - { "path": "../../libs/shutdown" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**", "**/tests/**", "**/__tests__/**"], + "references": [ + { "path": "../../libs/types" }, + { "path": "../../libs/config" }, + { "path": "../../libs/logger" }, + { "path": "../../libs/utils" }, + { "path": "../../libs/event-bus" }, + { "path": "../../libs/shutdown" } + ] +} diff --git a/apps/execution-service/turbo.json b/apps/execution-service/turbo.json index 46798ee..920f376 100644 --- a/apps/execution-service/turbo.json +++ b/apps/execution-service/turbo.json @@ -1,17 +1,17 @@ -{ - "extends": ["//"], - "tasks": 
{ - "build": { - "dependsOn": [ - "@stock-bot/types#build", - "@stock-bot/config#build", - "@stock-bot/logger#build", - "@stock-bot/utils#build", - "@stock-bot/event-bus#build", - "@stock-bot/shutdown#build" - ], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": [ + "@stock-bot/types#build", + "@stock-bot/config#build", + "@stock-bot/logger#build", + "@stock-bot/utils#build", + "@stock-bot/event-bus#build", + "@stock-bot/shutdown#build" + ], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/apps/portfolio-service/package.json b/apps/portfolio-service/package.json index 6e838da..de01cbd 100644 --- a/apps/portfolio-service/package.json +++ b/apps/portfolio-service/package.json @@ -1,38 +1,38 @@ -{ - "name": "@stock-bot/portfolio-service", - "version": "1.0.0", - "description": "Portfolio service for stock trading bot - handles portfolio tracking and performance analytics", - "main": "dist/index.js", - "type": "module", - "scripts": { - "build": "tsc", - "devvvvv": "bun --watch src/index.ts", - "start": "bun src/index.ts", - "test": "bun test", - "lint": "eslint src --ext .ts", - "type-check": "tsc --noEmit" - }, - "dependencies": { - "@hono/node-server": "^1.12.0", - "hono": "^4.6.1", - "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "@stock-bot/questdb-client": "*", - "@stock-bot/utils": "*", - "@stock-bot/data-frame": "*" - }, - "devDependencies": { - "@types/node": "^22.5.0", - "typescript": "^5.5.4" - }, - "keywords": [ - "trading", - "portfolio", - "performance", - "analytics", - "stock-bot" - ], - "author": "Stock Bot Team", - "license": "MIT" -} +{ + "name": "@stock-bot/portfolio-service", + 
"version": "1.0.0", + "description": "Portfolio service for stock trading bot - handles portfolio tracking and performance analytics", + "main": "dist/index.js", + "type": "module", + "scripts": { + "build": "tsc", + "devvvvv": "bun --watch src/index.ts", + "start": "bun src/index.ts", + "test": "bun test", + "lint": "eslint src --ext .ts", + "type-check": "tsc --noEmit" + }, + "dependencies": { + "@hono/node-server": "^1.12.0", + "hono": "^4.6.1", + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "@stock-bot/questdb-client": "*", + "@stock-bot/utils": "*", + "@stock-bot/data-frame": "*" + }, + "devDependencies": { + "@types/node": "^22.5.0", + "typescript": "^5.5.4" + }, + "keywords": [ + "trading", + "portfolio", + "performance", + "analytics", + "stock-bot" + ], + "author": "Stock Bot Team", + "license": "MIT" +} diff --git a/apps/portfolio-service/src/analytics/performance-analyzer.ts b/apps/portfolio-service/src/analytics/performance-analyzer.ts index b31579d..d18f526 100644 --- a/apps/portfolio-service/src/analytics/performance-analyzer.ts +++ b/apps/portfolio-service/src/analytics/performance-analyzer.ts @@ -1,204 +1,204 @@ -import { PortfolioSnapshot, Trade } from '../portfolio/portfolio-manager.ts'; - -export interface PerformanceMetrics { - totalReturn: number; - annualizedReturn: number; - sharpeRatio: number; - maxDrawdown: number; - volatility: number; - beta: number; - alpha: number; - calmarRatio: number; - sortinoRatio: number; -} - -export interface RiskMetrics { - var95: number; // Value at Risk (95% confidence) - cvar95: number; // Conditional Value at Risk - maxDrawdown: number; - downsideDeviation: number; - correlationMatrix: Record>; -} - -export class PerformanceAnalyzer { - private snapshots: PortfolioSnapshot[] = []; - private benchmarkReturns: number[] = []; // S&P 500 or other benchmark - - addSnapshot(snapshot: PortfolioSnapshot): void { - this.snapshots.push(snapshot); - // Keep only last 252 trading 
days (1 year) - if (this.snapshots.length > 252) { - this.snapshots = this.snapshots.slice(-252); - } - } - - calculatePerformanceMetrics(period: 'daily' | 'weekly' | 'monthly' = 'daily'): PerformanceMetrics { - if (this.snapshots.length < 2) { - throw new Error('Need at least 2 snapshots to calculate performance'); - } - - const returns = this.calculateReturns(period); - const riskFreeRate = 0.02; // 2% annual risk-free rate - - return { - totalReturn: this.calculateTotalReturn(), - annualizedReturn: this.calculateAnnualizedReturn(returns), - sharpeRatio: this.calculateSharpeRatio(returns, riskFreeRate), - maxDrawdown: this.calculateMaxDrawdown(), - volatility: this.calculateVolatility(returns), - beta: this.calculateBeta(returns), - alpha: this.calculateAlpha(returns, riskFreeRate), - calmarRatio: this.calculateCalmarRatio(returns), - sortinoRatio: this.calculateSortinoRatio(returns, riskFreeRate) - }; - } - - calculateRiskMetrics(): RiskMetrics { - const returns = this.calculateReturns('daily'); - - return { - var95: this.calculateVaR(returns, 0.95), - cvar95: this.calculateCVaR(returns, 0.95), - maxDrawdown: this.calculateMaxDrawdown(), - downsideDeviation: this.calculateDownsideDeviation(returns), - correlationMatrix: {} // TODO: Implement correlation matrix - }; - } - - private calculateReturns(period: 'daily' | 'weekly' | 'monthly'): number[] { - if (this.snapshots.length < 2) return []; - - const returns: number[] = []; - - for (let i = 1; i < this.snapshots.length; i++) { - const currentValue = this.snapshots[i].totalValue; - const previousValue = this.snapshots[i - 1].totalValue; - const return_ = (currentValue - previousValue) / previousValue; - returns.push(return_); - } - - return returns; - } - - private calculateTotalReturn(): number { - if (this.snapshots.length < 2) return 0; - - const firstValue = this.snapshots[0].totalValue; - const lastValue = this.snapshots[this.snapshots.length - 1].totalValue; - - return (lastValue - firstValue) / 
firstValue; - } - - private calculateAnnualizedReturn(returns: number[]): number { - if (returns.length === 0) return 0; - - const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - return Math.pow(1 + avgReturn, 252) - 1; // 252 trading days per year - } - - private calculateVolatility(returns: number[]): number { - if (returns.length === 0) return 0; - - const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - avgReturn, 2), 0) / returns.length; - - return Math.sqrt(variance * 252); // Annualized volatility - } - - private calculateSharpeRatio(returns: number[], riskFreeRate: number): number { - if (returns.length === 0) return 0; - - const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const annualizedReturn = Math.pow(1 + avgReturn, 252) - 1; - const volatility = this.calculateVolatility(returns); - - if (volatility === 0) return 0; - - return (annualizedReturn - riskFreeRate) / volatility; - } - - private calculateMaxDrawdown(): number { - if (this.snapshots.length === 0) return 0; - - let maxDrawdown = 0; - let peak = this.snapshots[0].totalValue; - - for (const snapshot of this.snapshots) { - if (snapshot.totalValue > peak) { - peak = snapshot.totalValue; - } - - const drawdown = (peak - snapshot.totalValue) / peak; - maxDrawdown = Math.max(maxDrawdown, drawdown); - } - - return maxDrawdown; - } - - private calculateBeta(returns: number[]): number { - if (returns.length === 0 || this.benchmarkReturns.length === 0) return 1.0; - - // Simple beta calculation - would need actual benchmark data - return 1.0; // Placeholder - } - - private calculateAlpha(returns: number[], riskFreeRate: number): number { - const beta = this.calculateBeta(returns); - const portfolioReturn = this.calculateAnnualizedReturn(returns); - const benchmarkReturn = 0.10; // 10% benchmark return (placeholder) - - return portfolioReturn - 
(riskFreeRate + beta * (benchmarkReturn - riskFreeRate)); - } - - private calculateCalmarRatio(returns: number[]): number { - const annualizedReturn = this.calculateAnnualizedReturn(returns); - const maxDrawdown = this.calculateMaxDrawdown(); - - if (maxDrawdown === 0) return 0; - - return annualizedReturn / maxDrawdown; - } - - private calculateSortinoRatio(returns: number[], riskFreeRate: number): number { - const annualizedReturn = this.calculateAnnualizedReturn(returns); - const downsideDeviation = this.calculateDownsideDeviation(returns); - - if (downsideDeviation === 0) return 0; - - return (annualizedReturn - riskFreeRate) / downsideDeviation; - } - - private calculateDownsideDeviation(returns: number[]): number { - if (returns.length === 0) return 0; - - const negativeReturns = returns.filter(ret => ret < 0); - if (negativeReturns.length === 0) return 0; - - const avgNegativeReturn = negativeReturns.reduce((sum, ret) => sum + ret, 0) / negativeReturns.length; - const variance = negativeReturns.reduce((sum, ret) => sum + Math.pow(ret - avgNegativeReturn, 2), 0) / negativeReturns.length; - - return Math.sqrt(variance * 252); // Annualized - } - - private calculateVaR(returns: number[], confidence: number): number { - if (returns.length === 0) return 0; - - const sortedReturns = returns.slice().sort((a, b) => a - b); - const index = Math.floor((1 - confidence) * sortedReturns.length); - - return -sortedReturns[index]; // Return as positive value - } - - private calculateCVaR(returns: number[], confidence: number): number { - if (returns.length === 0) return 0; - - const sortedReturns = returns.slice().sort((a, b) => a - b); - const cutoffIndex = Math.floor((1 - confidence) * sortedReturns.length); - const tailReturns = sortedReturns.slice(0, cutoffIndex + 1); - - if (tailReturns.length === 0) return 0; - - const avgTailReturn = tailReturns.reduce((sum, ret) => sum + ret, 0) / tailReturns.length; - return -avgTailReturn; // Return as positive value - } -} 
+import { PortfolioSnapshot, Trade } from '../portfolio/portfolio-manager.ts'; + +export interface PerformanceMetrics { + totalReturn: number; + annualizedReturn: number; + sharpeRatio: number; + maxDrawdown: number; + volatility: number; + beta: number; + alpha: number; + calmarRatio: number; + sortinoRatio: number; +} + +export interface RiskMetrics { + var95: number; // Value at Risk (95% confidence) + cvar95: number; // Conditional Value at Risk + maxDrawdown: number; + downsideDeviation: number; + correlationMatrix: Record>; +} + +export class PerformanceAnalyzer { + private snapshots: PortfolioSnapshot[] = []; + private benchmarkReturns: number[] = []; // S&P 500 or other benchmark + + addSnapshot(snapshot: PortfolioSnapshot): void { + this.snapshots.push(snapshot); + // Keep only last 252 trading days (1 year) + if (this.snapshots.length > 252) { + this.snapshots = this.snapshots.slice(-252); + } + } + + calculatePerformanceMetrics(period: 'daily' | 'weekly' | 'monthly' = 'daily'): PerformanceMetrics { + if (this.snapshots.length < 2) { + throw new Error('Need at least 2 snapshots to calculate performance'); + } + + const returns = this.calculateReturns(period); + const riskFreeRate = 0.02; // 2% annual risk-free rate + + return { + totalReturn: this.calculateTotalReturn(), + annualizedReturn: this.calculateAnnualizedReturn(returns), + sharpeRatio: this.calculateSharpeRatio(returns, riskFreeRate), + maxDrawdown: this.calculateMaxDrawdown(), + volatility: this.calculateVolatility(returns), + beta: this.calculateBeta(returns), + alpha: this.calculateAlpha(returns, riskFreeRate), + calmarRatio: this.calculateCalmarRatio(returns), + sortinoRatio: this.calculateSortinoRatio(returns, riskFreeRate) + }; + } + + calculateRiskMetrics(): RiskMetrics { + const returns = this.calculateReturns('daily'); + + return { + var95: this.calculateVaR(returns, 0.95), + cvar95: this.calculateCVaR(returns, 0.95), + maxDrawdown: this.calculateMaxDrawdown(), + downsideDeviation: 
this.calculateDownsideDeviation(returns), + correlationMatrix: {} // TODO: Implement correlation matrix + }; + } + + private calculateReturns(period: 'daily' | 'weekly' | 'monthly'): number[] { + if (this.snapshots.length < 2) return []; + + const returns: number[] = []; + + for (let i = 1; i < this.snapshots.length; i++) { + const currentValue = this.snapshots[i].totalValue; + const previousValue = this.snapshots[i - 1].totalValue; + const return_ = (currentValue - previousValue) / previousValue; + returns.push(return_); + } + + return returns; + } + + private calculateTotalReturn(): number { + if (this.snapshots.length < 2) return 0; + + const firstValue = this.snapshots[0].totalValue; + const lastValue = this.snapshots[this.snapshots.length - 1].totalValue; + + return (lastValue - firstValue) / firstValue; + } + + private calculateAnnualizedReturn(returns: number[]): number { + if (returns.length === 0) return 0; + + const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + return Math.pow(1 + avgReturn, 252) - 1; // 252 trading days per year + } + + private calculateVolatility(returns: number[]): number { + if (returns.length === 0) return 0; + + const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - avgReturn, 2), 0) / returns.length; + + return Math.sqrt(variance * 252); // Annualized volatility + } + + private calculateSharpeRatio(returns: number[], riskFreeRate: number): number { + if (returns.length === 0) return 0; + + const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const annualizedReturn = Math.pow(1 + avgReturn, 252) - 1; + const volatility = this.calculateVolatility(returns); + + if (volatility === 0) return 0; + + return (annualizedReturn - riskFreeRate) / volatility; + } + + private calculateMaxDrawdown(): number { + if (this.snapshots.length === 0) return 0; + + let maxDrawdown = 0; + let peak = 
this.snapshots[0].totalValue; + + for (const snapshot of this.snapshots) { + if (snapshot.totalValue > peak) { + peak = snapshot.totalValue; + } + + const drawdown = (peak - snapshot.totalValue) / peak; + maxDrawdown = Math.max(maxDrawdown, drawdown); + } + + return maxDrawdown; + } + + private calculateBeta(returns: number[]): number { + if (returns.length === 0 || this.benchmarkReturns.length === 0) return 1.0; + + // Simple beta calculation - would need actual benchmark data + return 1.0; // Placeholder + } + + private calculateAlpha(returns: number[], riskFreeRate: number): number { + const beta = this.calculateBeta(returns); + const portfolioReturn = this.calculateAnnualizedReturn(returns); + const benchmarkReturn = 0.10; // 10% benchmark return (placeholder) + + return portfolioReturn - (riskFreeRate + beta * (benchmarkReturn - riskFreeRate)); + } + + private calculateCalmarRatio(returns: number[]): number { + const annualizedReturn = this.calculateAnnualizedReturn(returns); + const maxDrawdown = this.calculateMaxDrawdown(); + + if (maxDrawdown === 0) return 0; + + return annualizedReturn / maxDrawdown; + } + + private calculateSortinoRatio(returns: number[], riskFreeRate: number): number { + const annualizedReturn = this.calculateAnnualizedReturn(returns); + const downsideDeviation = this.calculateDownsideDeviation(returns); + + if (downsideDeviation === 0) return 0; + + return (annualizedReturn - riskFreeRate) / downsideDeviation; + } + + private calculateDownsideDeviation(returns: number[]): number { + if (returns.length === 0) return 0; + + const negativeReturns = returns.filter(ret => ret < 0); + if (negativeReturns.length === 0) return 0; + + const avgNegativeReturn = negativeReturns.reduce((sum, ret) => sum + ret, 0) / negativeReturns.length; + const variance = negativeReturns.reduce((sum, ret) => sum + Math.pow(ret - avgNegativeReturn, 2), 0) / negativeReturns.length; + + return Math.sqrt(variance * 252); // Annualized + } + + private 
calculateVaR(returns: number[], confidence: number): number { + if (returns.length === 0) return 0; + + const sortedReturns = returns.slice().sort((a, b) => a - b); + const index = Math.floor((1 - confidence) * sortedReturns.length); + + return -sortedReturns[index]; // Return as positive value + } + + private calculateCVaR(returns: number[], confidence: number): number { + if (returns.length === 0) return 0; + + const sortedReturns = returns.slice().sort((a, b) => a - b); + const cutoffIndex = Math.floor((1 - confidence) * sortedReturns.length); + const tailReturns = sortedReturns.slice(0, cutoffIndex + 1); + + if (tailReturns.length === 0) return 0; + + const avgTailReturn = tailReturns.reduce((sum, ret) => sum + ret, 0) / tailReturns.length; + return -avgTailReturn; // Return as positive value + } +} diff --git a/apps/portfolio-service/src/index.ts b/apps/portfolio-service/src/index.ts index 273dac8..a5aeca6 100644 --- a/apps/portfolio-service/src/index.ts +++ b/apps/portfolio-service/src/index.ts @@ -1,133 +1,133 @@ -import { Hono } from 'hono'; -import { serve } from '@hono/node-server'; -import { getLogger } from '@stock-bot/logger'; -import { config } from '@stock-bot/config'; -import { PortfolioManager } from './portfolio/portfolio-manager.ts'; -import { PerformanceAnalyzer } from './analytics/performance-analyzer.ts'; - -const app = new Hono(); -const logger = getLogger('portfolio-service'); -// Health check endpoint -app.get('/health', (c) => { - return c.json({ - status: 'healthy', - service: 'portfolio-service', - timestamp: new Date().toISOString() - }); -}); - -// Portfolio endpoints -app.get('/portfolio/overview', async (c) => { - try { - // TODO: Get portfolio overview - return c.json({ - totalValue: 125000, - totalReturn: 25000, - totalReturnPercent: 25.0, - dayChange: 1250, - dayChangePercent: 1.0, - positions: [] - }); - } catch (error) { - logger.error('Failed to get portfolio overview', error); - return c.json({ error: 'Failed to get portfolio 
overview' }, 500); - } -}); - -app.get('/portfolio/positions', async (c) => { - try { - // TODO: Get current positions - return c.json([ - { - symbol: 'AAPL', - quantity: 100, - averagePrice: 150.0, - currentPrice: 155.0, - marketValue: 15500, - unrealizedPnL: 500, - unrealizedPnLPercent: 3.33 - } - ]); - } catch (error) { - logger.error('Failed to get positions', error); - return c.json({ error: 'Failed to get positions' }, 500); - } -}); - -app.get('/portfolio/history', async (c) => { - const days = c.req.query('days') || '30'; - - try { - // TODO: Get portfolio history - return c.json({ - period: `${days} days`, - data: [] - }); - } catch (error) { - logger.error('Failed to get portfolio history', error); - return c.json({ error: 'Failed to get portfolio history' }, 500); - } -}); - -// Performance analytics endpoints -app.get('/analytics/performance', async (c) => { - const period = c.req.query('period') || '1M'; - - try { - // TODO: Calculate performance metrics - return c.json({ - period, - totalReturn: 0.25, - annualizedReturn: 0.30, - sharpeRatio: 1.5, - maxDrawdown: 0.05, - volatility: 0.15, - beta: 1.1, - alpha: 0.02 - }); - } catch (error) { - logger.error('Failed to get performance analytics', error); - return c.json({ error: 'Failed to get performance analytics' }, 500); - } -}); - -app.get('/analytics/risk', async (c) => { - try { - // TODO: Calculate risk metrics - return c.json({ - var95: 0.02, - cvar95: 0.03, - maxDrawdown: 0.05, - downside_deviation: 0.08, - correlation_matrix: {} - }); - } catch (error) { - logger.error('Failed to get risk analytics', error); - return c.json({ error: 'Failed to get risk analytics' }, 500); - } -}); - -app.get('/analytics/attribution', async (c) => { - try { - // TODO: Calculate performance attribution - return c.json({ - sector_allocation: {}, - security_selection: {}, - interaction_effect: {} - }); - } catch (error) { - logger.error('Failed to get attribution analytics', error); - return c.json({ error: 'Failed 
to get attribution analytics' }, 500); - } -}); - -const port = config.PORTFOLIO_SERVICE_PORT || 3005; - -logger.info(`Starting portfolio service on port ${port}`); - -serve({ - fetch: app.fetch, - port -}, (info) => { - logger.info(`Portfolio service is running on port ${info.port}`); -}); +import { Hono } from 'hono'; +import { serve } from '@hono/node-server'; +import { getLogger } from '@stock-bot/logger'; +import { config } from '@stock-bot/config'; +import { PortfolioManager } from './portfolio/portfolio-manager.ts'; +import { PerformanceAnalyzer } from './analytics/performance-analyzer.ts'; + +const app = new Hono(); +const logger = getLogger('portfolio-service'); +// Health check endpoint +app.get('/health', (c) => { + return c.json({ + status: 'healthy', + service: 'portfolio-service', + timestamp: new Date().toISOString() + }); +}); + +// Portfolio endpoints +app.get('/portfolio/overview', async (c) => { + try { + // TODO: Get portfolio overview + return c.json({ + totalValue: 125000, + totalReturn: 25000, + totalReturnPercent: 25.0, + dayChange: 1250, + dayChangePercent: 1.0, + positions: [] + }); + } catch (error) { + logger.error('Failed to get portfolio overview', error); + return c.json({ error: 'Failed to get portfolio overview' }, 500); + } +}); + +app.get('/portfolio/positions', async (c) => { + try { + // TODO: Get current positions + return c.json([ + { + symbol: 'AAPL', + quantity: 100, + averagePrice: 150.0, + currentPrice: 155.0, + marketValue: 15500, + unrealizedPnL: 500, + unrealizedPnLPercent: 3.33 + } + ]); + } catch (error) { + logger.error('Failed to get positions', error); + return c.json({ error: 'Failed to get positions' }, 500); + } +}); + +app.get('/portfolio/history', async (c) => { + const days = c.req.query('days') || '30'; + + try { + // TODO: Get portfolio history + return c.json({ + period: `${days} days`, + data: [] + }); + } catch (error) { + logger.error('Failed to get portfolio history', error); + return c.json({ error: 
'Failed to get portfolio history' }, 500); + } +}); + +// Performance analytics endpoints +app.get('/analytics/performance', async (c) => { + const period = c.req.query('period') || '1M'; + + try { + // TODO: Calculate performance metrics + return c.json({ + period, + totalReturn: 0.25, + annualizedReturn: 0.30, + sharpeRatio: 1.5, + maxDrawdown: 0.05, + volatility: 0.15, + beta: 1.1, + alpha: 0.02 + }); + } catch (error) { + logger.error('Failed to get performance analytics', error); + return c.json({ error: 'Failed to get performance analytics' }, 500); + } +}); + +app.get('/analytics/risk', async (c) => { + try { + // TODO: Calculate risk metrics + return c.json({ + var95: 0.02, + cvar95: 0.03, + maxDrawdown: 0.05, + downside_deviation: 0.08, + correlation_matrix: {} + }); + } catch (error) { + logger.error('Failed to get risk analytics', error); + return c.json({ error: 'Failed to get risk analytics' }, 500); + } +}); + +app.get('/analytics/attribution', async (c) => { + try { + // TODO: Calculate performance attribution + return c.json({ + sector_allocation: {}, + security_selection: {}, + interaction_effect: {} + }); + } catch (error) { + logger.error('Failed to get attribution analytics', error); + return c.json({ error: 'Failed to get attribution analytics' }, 500); + } +}); + +const port = config.PORTFOLIO_SERVICE_PORT || 3005; + +logger.info(`Starting portfolio service on port ${port}`); + +serve({ + fetch: app.fetch, + port +}, (info) => { + logger.info(`Portfolio service is running on port ${info.port}`); +}); diff --git a/apps/portfolio-service/src/portfolio/portfolio-manager.ts b/apps/portfolio-service/src/portfolio/portfolio-manager.ts index 0222816..7818e1d 100644 --- a/apps/portfolio-service/src/portfolio/portfolio-manager.ts +++ b/apps/portfolio-service/src/portfolio/portfolio-manager.ts @@ -1,159 +1,159 @@ -import { getLogger } from '@stock-bot/logger'; - -export interface Position { - symbol: string; - quantity: number; - averagePrice: number; - 
currentPrice: number; - marketValue: number; - unrealizedPnL: number; - unrealizedPnLPercent: number; - costBasis: number; - lastUpdated: Date; -} - -export interface PortfolioSnapshot { - timestamp: Date; - totalValue: number; - cashBalance: number; - positions: Position[]; - totalReturn: number; - totalReturnPercent: number; - dayChange: number; - dayChangePercent: number; -} - -export interface Trade { - id: string; - symbol: string; - quantity: number; - price: number; - side: 'buy' | 'sell'; - timestamp: Date; - commission: number; -} - -export class PortfolioManager { - private logger = getLogger('PortfolioManager'); - private positions: Map = new Map(); - private trades: Trade[] = []; - private cashBalance: number = 100000; // Starting cash - - constructor(initialCash: number = 100000) { - this.cashBalance = initialCash; - } - - addTrade(trade: Trade): void { - this.trades.push(trade); - this.updatePosition(trade); - logger.info(`Trade added: ${trade.symbol} ${trade.side} ${trade.quantity} @ ${trade.price}`); - } - - private updatePosition(trade: Trade): void { - const existing = this.positions.get(trade.symbol); - - if (!existing) { - // New position - if (trade.side === 'buy') { - this.positions.set(trade.symbol, { - symbol: trade.symbol, - quantity: trade.quantity, - averagePrice: trade.price, - currentPrice: trade.price, - marketValue: trade.quantity * trade.price, - unrealizedPnL: 0, - unrealizedPnLPercent: 0, - costBasis: trade.quantity * trade.price + trade.commission, - lastUpdated: trade.timestamp - }); - this.cashBalance -= (trade.quantity * trade.price + trade.commission); - } - return; - } - - // Update existing position - if (trade.side === 'buy') { - const newQuantity = existing.quantity + trade.quantity; - const newCostBasis = existing.costBasis + (trade.quantity * trade.price) + trade.commission; - - existing.quantity = newQuantity; - existing.averagePrice = (newCostBasis - this.getTotalCommissions(trade.symbol)) / newQuantity; - 
existing.costBasis = newCostBasis; - existing.lastUpdated = trade.timestamp; - - this.cashBalance -= (trade.quantity * trade.price + trade.commission); - - } else if (trade.side === 'sell') { - existing.quantity -= trade.quantity; - existing.lastUpdated = trade.timestamp; - - const proceeds = trade.quantity * trade.price - trade.commission; - this.cashBalance += proceeds; - - // Remove position if quantity is zero - if (existing.quantity <= 0) { - this.positions.delete(trade.symbol); - } - } - } - - updatePrice(symbol: string, price: number): void { - const position = this.positions.get(symbol); - if (position) { - position.currentPrice = price; - position.marketValue = position.quantity * price; - position.unrealizedPnL = position.marketValue - (position.quantity * position.averagePrice); - position.unrealizedPnLPercent = position.unrealizedPnL / (position.quantity * position.averagePrice) * 100; - position.lastUpdated = new Date(); - } - } - - getPosition(symbol: string): Position | undefined { - return this.positions.get(symbol); - } - - getAllPositions(): Position[] { - return Array.from(this.positions.values()); - } - - getPortfolioSnapshot(): PortfolioSnapshot { - const positions = this.getAllPositions(); - const totalMarketValue = positions.reduce((sum, pos) => sum + pos.marketValue, 0); - const totalValue = totalMarketValue + this.cashBalance; - const totalUnrealizedPnL = positions.reduce((sum, pos) => sum + pos.unrealizedPnL, 0); - - return { - timestamp: new Date(), - totalValue, - cashBalance: this.cashBalance, - positions, - totalReturn: totalUnrealizedPnL, // Simplified - should include realized gains - totalReturnPercent: (totalUnrealizedPnL / (totalValue - totalUnrealizedPnL)) * 100, - dayChange: 0, // TODO: Calculate from previous day - dayChangePercent: 0 - }; - } - - getTrades(symbol?: string): Trade[] { - if (symbol) { - return this.trades.filter(trade => trade.symbol === symbol); - } - return this.trades; - } - - private 
getTotalCommissions(symbol: string): number { - return this.trades - .filter(trade => trade.symbol === symbol) - .reduce((sum, trade) => sum + trade.commission, 0); - } - - getCashBalance(): number { - return this.cashBalance; - } - - getNetLiquidationValue(): number { - const positions = this.getAllPositions(); - const positionValue = positions.reduce((sum, pos) => sum + pos.marketValue, 0); - return positionValue + this.cashBalance; - } -} +import { getLogger } from '@stock-bot/logger'; + +export interface Position { + symbol: string; + quantity: number; + averagePrice: number; + currentPrice: number; + marketValue: number; + unrealizedPnL: number; + unrealizedPnLPercent: number; + costBasis: number; + lastUpdated: Date; +} + +export interface PortfolioSnapshot { + timestamp: Date; + totalValue: number; + cashBalance: number; + positions: Position[]; + totalReturn: number; + totalReturnPercent: number; + dayChange: number; + dayChangePercent: number; +} + +export interface Trade { + id: string; + symbol: string; + quantity: number; + price: number; + side: 'buy' | 'sell'; + timestamp: Date; + commission: number; +} + +export class PortfolioManager { + private logger = getLogger('PortfolioManager'); + private positions: Map = new Map(); + private trades: Trade[] = []; + private cashBalance: number = 100000; // Starting cash + + constructor(initialCash: number = 100000) { + this.cashBalance = initialCash; + } + + addTrade(trade: Trade): void { + this.trades.push(trade); + this.updatePosition(trade); + logger.info(`Trade added: ${trade.symbol} ${trade.side} ${trade.quantity} @ ${trade.price}`); + } + + private updatePosition(trade: Trade): void { + const existing = this.positions.get(trade.symbol); + + if (!existing) { + // New position + if (trade.side === 'buy') { + this.positions.set(trade.symbol, { + symbol: trade.symbol, + quantity: trade.quantity, + averagePrice: trade.price, + currentPrice: trade.price, + marketValue: trade.quantity * trade.price, + 
unrealizedPnL: 0, + unrealizedPnLPercent: 0, + costBasis: trade.quantity * trade.price + trade.commission, + lastUpdated: trade.timestamp + }); + this.cashBalance -= (trade.quantity * trade.price + trade.commission); + } + return; + } + + // Update existing position + if (trade.side === 'buy') { + const newQuantity = existing.quantity + trade.quantity; + const newCostBasis = existing.costBasis + (trade.quantity * trade.price) + trade.commission; + + existing.quantity = newQuantity; + existing.averagePrice = (newCostBasis - this.getTotalCommissions(trade.symbol)) / newQuantity; + existing.costBasis = newCostBasis; + existing.lastUpdated = trade.timestamp; + + this.cashBalance -= (trade.quantity * trade.price + trade.commission); + + } else if (trade.side === 'sell') { + existing.quantity -= trade.quantity; + existing.lastUpdated = trade.timestamp; + + const proceeds = trade.quantity * trade.price - trade.commission; + this.cashBalance += proceeds; + + // Remove position if quantity is zero + if (existing.quantity <= 0) { + this.positions.delete(trade.symbol); + } + } + } + + updatePrice(symbol: string, price: number): void { + const position = this.positions.get(symbol); + if (position) { + position.currentPrice = price; + position.marketValue = position.quantity * price; + position.unrealizedPnL = position.marketValue - (position.quantity * position.averagePrice); + position.unrealizedPnLPercent = position.unrealizedPnL / (position.quantity * position.averagePrice) * 100; + position.lastUpdated = new Date(); + } + } + + getPosition(symbol: string): Position | undefined { + return this.positions.get(symbol); + } + + getAllPositions(): Position[] { + return Array.from(this.positions.values()); + } + + getPortfolioSnapshot(): PortfolioSnapshot { + const positions = this.getAllPositions(); + const totalMarketValue = positions.reduce((sum, pos) => sum + pos.marketValue, 0); + const totalValue = totalMarketValue + this.cashBalance; + const totalUnrealizedPnL = 
positions.reduce((sum, pos) => sum + pos.unrealizedPnL, 0); + + return { + timestamp: new Date(), + totalValue, + cashBalance: this.cashBalance, + positions, + totalReturn: totalUnrealizedPnL, // Simplified - should include realized gains + totalReturnPercent: (totalUnrealizedPnL / (totalValue - totalUnrealizedPnL)) * 100, + dayChange: 0, // TODO: Calculate from previous day + dayChangePercent: 0 + }; + } + + getTrades(symbol?: string): Trade[] { + if (symbol) { + return this.trades.filter(trade => trade.symbol === symbol); + } + return this.trades; + } + + private getTotalCommissions(symbol: string): number { + return this.trades + .filter(trade => trade.symbol === symbol) + .reduce((sum, trade) => sum + trade.commission, 0); + } + + getCashBalance(): number { + return this.cashBalance; + } + + getNetLiquidationValue(): number { + const positions = this.getAllPositions(); + const positionValue = positions.reduce((sum, pos) => sum + pos.marketValue, 0); + return positionValue + this.cashBalance; + } +} diff --git a/apps/portfolio-service/tsconfig.json b/apps/portfolio-service/tsconfig.json index f49f390..e88254b 100644 --- a/apps/portfolio-service/tsconfig.json +++ b/apps/portfolio-service/tsconfig.json @@ -1,18 +1,18 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**", "**/tests/**", "**/__tests__/**"], - "references": [ - { "path": "../../libs/types" }, - { "path": "../../libs/config" }, - { "path": "../../libs/logger" }, - { "path": "../../libs/utils" }, - { "path": "../../libs/postgres-client" }, - { "path": "../../libs/event-bus" }, - { "path": "../../libs/shutdown" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**", 
"**/tests/**", "**/__tests__/**"], + "references": [ + { "path": "../../libs/types" }, + { "path": "../../libs/config" }, + { "path": "../../libs/logger" }, + { "path": "../../libs/utils" }, + { "path": "../../libs/postgres-client" }, + { "path": "../../libs/event-bus" }, + { "path": "../../libs/shutdown" } + ] +} diff --git a/apps/portfolio-service/turbo.json b/apps/portfolio-service/turbo.json index 6c5e276..d0779ff 100644 --- a/apps/portfolio-service/turbo.json +++ b/apps/portfolio-service/turbo.json @@ -1,18 +1,18 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": [ - "@stock-bot/types#build", - "@stock-bot/config#build", - "@stock-bot/logger#build", - "@stock-bot/utils#build", - "@stock-bot/postgres-client#build", - "@stock-bot/event-bus#build", - "@stock-bot/shutdown#build" - ], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": [ + "@stock-bot/types#build", + "@stock-bot/config#build", + "@stock-bot/logger#build", + "@stock-bot/utils#build", + "@stock-bot/postgres-client#build", + "@stock-bot/event-bus#build", + "@stock-bot/shutdown#build" + ], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/apps/processing-service/package.json b/apps/processing-service/package.json index 1bfe342..66196a8 100644 --- a/apps/processing-service/package.json +++ b/apps/processing-service/package.json @@ -1,26 +1,26 @@ -{ - "name": "@stock-bot/processing-service", - "version": "1.0.0", - "description": "Combined data processing and technical indicators service", - "main": "dist/index.js", - "type": "module", - "scripts": { - "devvvvv": "bun --watch src/index.ts", - "build": "bun build src/index.ts --outdir dist --target node", - 
"start": "bun dist/index.js", - "test": "bun test", - "clean": "rm -rf dist" - }, - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "@stock-bot/utils": "*", - "@stock-bot/event-bus": "*", - "@stock-bot/vector-engine": "*", - "hono": "^4.0.0" - }, - "devDependencies": { - "typescript": "^5.0.0" - } -} +{ + "name": "@stock-bot/processing-service", + "version": "1.0.0", + "description": "Combined data processing and technical indicators service", + "main": "dist/index.js", + "type": "module", + "scripts": { + "devvvvv": "bun --watch src/index.ts", + "build": "bun build src/index.ts --outdir dist --target node", + "start": "bun dist/index.js", + "test": "bun test", + "clean": "rm -rf dist" + }, + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "@stock-bot/utils": "*", + "@stock-bot/event-bus": "*", + "@stock-bot/vector-engine": "*", + "hono": "^4.0.0" + }, + "devDependencies": { + "typescript": "^5.0.0" + } +} diff --git a/apps/processing-service/src/index.ts b/apps/processing-service/src/index.ts index ca4e04d..27d5d62 100644 --- a/apps/processing-service/src/index.ts +++ b/apps/processing-service/src/index.ts @@ -1,54 +1,54 @@ -/** - * Processing Service - Technical indicators and data processing - */ -import { getLogger } from '@stock-bot/logger'; -import { loadEnvVariables } from '@stock-bot/config'; -import { Hono } from 'hono'; -import { serve } from '@hono/node-server'; - -// Load environment variables -loadEnvVariables(); - -const app = new Hono(); -const logger = getLogger('processing-service'); -const PORT = parseInt(process.env.PROCESSING_SERVICE_PORT || '3003'); - -// Health check endpoint -app.get('/health', (c) => { - return c.json({ - service: 'processing-service', - status: 'healthy', - timestamp: new Date().toISOString() - }); -}); - -// Technical indicators endpoint -app.post('/api/indicators', async (c) => { - const body = await c.req.json(); 
- logger.info('Technical indicators request', { indicators: body.indicators }); - - // TODO: Implement technical indicators processing - return c.json({ - message: 'Technical indicators endpoint - not implemented yet', - requestedIndicators: body.indicators - }); -}); - -// Vectorized processing endpoint -app.post('/api/vectorized/process', async (c) => { - const body = await c.req.json(); - logger.info('Vectorized processing request', { dataPoints: body.data?.length }); - - // TODO: Implement vectorized processing - return c.json({ - message: 'Vectorized processing endpoint - not implemented yet' - }); -}); - -// Start server -serve({ - fetch: app.fetch, - port: PORT, -}); - -logger.info(`Processing Service started on port ${PORT}`); +/** + * Processing Service - Technical indicators and data processing + */ +import { getLogger } from '@stock-bot/logger'; +import { loadEnvVariables } from '@stock-bot/config'; +import { Hono } from 'hono'; +import { serve } from '@hono/node-server'; + +// Load environment variables +loadEnvVariables(); + +const app = new Hono(); +const logger = getLogger('processing-service'); +const PORT = parseInt(process.env.PROCESSING_SERVICE_PORT || '3003'); + +// Health check endpoint +app.get('/health', (c) => { + return c.json({ + service: 'processing-service', + status: 'healthy', + timestamp: new Date().toISOString() + }); +}); + +// Technical indicators endpoint +app.post('/api/indicators', async (c) => { + const body = await c.req.json(); + logger.info('Technical indicators request', { indicators: body.indicators }); + + // TODO: Implement technical indicators processing + return c.json({ + message: 'Technical indicators endpoint - not implemented yet', + requestedIndicators: body.indicators + }); +}); + +// Vectorized processing endpoint +app.post('/api/vectorized/process', async (c) => { + const body = await c.req.json(); + logger.info('Vectorized processing request', { dataPoints: body.data?.length }); + + // TODO: Implement 
vectorized processing + return c.json({ + message: 'Vectorized processing endpoint - not implemented yet' + }); +}); + +// Start server +serve({ + fetch: app.fetch, + port: PORT, +}); + +logger.info(`Processing Service started on port ${PORT}`); diff --git a/apps/processing-service/src/indicators/indicators.ts b/apps/processing-service/src/indicators/indicators.ts index 847ed06..454de15 100644 --- a/apps/processing-service/src/indicators/indicators.ts +++ b/apps/processing-service/src/indicators/indicators.ts @@ -1,82 +1,82 @@ -/** - * Technical Indicators Service - * Leverages @stock-bot/utils for calculations - */ -import { getLogger } from '@stock-bot/logger'; -import { - sma, - ema, - rsi, - macd -} from '@stock-bot/utils'; - -const logger = getLogger('indicators-service'); - -export interface IndicatorRequest { - symbol: string; - data: number[]; - indicators: string[]; - parameters?: Record; -} - -export interface IndicatorResult { - symbol: string; - timestamp: Date; - indicators: Record; -} - -export class IndicatorsService { - async calculateIndicators(request: IndicatorRequest): Promise { - logger.info('Calculating indicators', { - symbol: request.symbol, - indicators: request.indicators, - dataPoints: request.data.length - }); - - const results: Record = {}; - - for (const indicator of request.indicators) { - try { - switch (indicator.toLowerCase()) { - case 'sma': - const smaPeriod = request.parameters?.smaPeriod || 20; - results.sma = sma(request.data, smaPeriod); - break; - - case 'ema': - const emaPeriod = request.parameters?.emaPeriod || 20; - results.ema = ema(request.data, emaPeriod); - break; - - case 'rsi': - const rsiPeriod = request.parameters?.rsiPeriod || 14; - results.rsi = rsi(request.data, rsiPeriod); - break; - - case 'macd': - const fast = request.parameters?.macdFast || 12; - const slow = request.parameters?.macdSlow || 26; - const signal = request.parameters?.macdSignal || 9; - results.macd = macd(request.data, fast, slow, 
signal).macd; - break; - - case 'stochastic': - // TODO: Implement stochastic oscillator - logger.warn('Stochastic oscillator not implemented yet'); - break; - - default: - logger.warn('Unknown indicator requested', { indicator }); - } - } catch (error) { - logger.error('Error calculating indicator', { indicator, error }); - } - } - - return { - symbol: request.symbol, - timestamp: new Date(), - indicators: results - }; - } -} +/** + * Technical Indicators Service + * Leverages @stock-bot/utils for calculations + */ +import { getLogger } from '@stock-bot/logger'; +import { + sma, + ema, + rsi, + macd +} from '@stock-bot/utils'; + +const logger = getLogger('indicators-service'); + +export interface IndicatorRequest { + symbol: string; + data: number[]; + indicators: string[]; + parameters?: Record; +} + +export interface IndicatorResult { + symbol: string; + timestamp: Date; + indicators: Record; +} + +export class IndicatorsService { + async calculateIndicators(request: IndicatorRequest): Promise { + logger.info('Calculating indicators', { + symbol: request.symbol, + indicators: request.indicators, + dataPoints: request.data.length + }); + + const results: Record = {}; + + for (const indicator of request.indicators) { + try { + switch (indicator.toLowerCase()) { + case 'sma': + const smaPeriod = request.parameters?.smaPeriod || 20; + results.sma = sma(request.data, smaPeriod); + break; + + case 'ema': + const emaPeriod = request.parameters?.emaPeriod || 20; + results.ema = ema(request.data, emaPeriod); + break; + + case 'rsi': + const rsiPeriod = request.parameters?.rsiPeriod || 14; + results.rsi = rsi(request.data, rsiPeriod); + break; + + case 'macd': + const fast = request.parameters?.macdFast || 12; + const slow = request.parameters?.macdSlow || 26; + const signal = request.parameters?.macdSignal || 9; + results.macd = macd(request.data, fast, slow, signal).macd; + break; + + case 'stochastic': + // TODO: Implement stochastic oscillator + 
logger.warn('Stochastic oscillator not implemented yet'); + break; + + default: + logger.warn('Unknown indicator requested', { indicator }); + } + } catch (error) { + logger.error('Error calculating indicator', { indicator, error }); + } + } + + return { + symbol: request.symbol, + timestamp: new Date(), + indicators: results + }; + } +} diff --git a/apps/processing-service/tsconfig.json b/apps/processing-service/tsconfig.json index 43b0af8..0b48f03 100644 --- a/apps/processing-service/tsconfig.json +++ b/apps/processing-service/tsconfig.json @@ -1,20 +1,20 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**", "**/tests/**", "**/__tests__/**"], - "references": [ - { "path": "../../libs/types" }, - { "path": "../../libs/config" }, - { "path": "../../libs/logger" }, - { "path": "../../libs/utils" }, - { "path": "../../libs/data-frame" }, - { "path": "../../libs/vector-engine" }, - { "path": "../../libs/mongodb-client" }, - { "path": "../../libs/event-bus" }, - { "path": "../../libs/shutdown" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**", "**/tests/**", "**/__tests__/**"], + "references": [ + { "path": "../../libs/types" }, + { "path": "../../libs/config" }, + { "path": "../../libs/logger" }, + { "path": "../../libs/utils" }, + { "path": "../../libs/data-frame" }, + { "path": "../../libs/vector-engine" }, + { "path": "../../libs/mongodb-client" }, + { "path": "../../libs/event-bus" }, + { "path": "../../libs/shutdown" } + ] +} diff --git a/apps/processing-service/turbo.json b/apps/processing-service/turbo.json index 87da996..ebaa9be 100644 --- a/apps/processing-service/turbo.json +++ b/apps/processing-service/turbo.json 
@@ -1,20 +1,20 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": [ - "@stock-bot/types#build", - "@stock-bot/config#build", - "@stock-bot/logger#build", - "@stock-bot/utils#build", - "@stock-bot/data-frame#build", - "@stock-bot/vector-engine#build", - "@stock-bot/mongodb-client#build", - "@stock-bot/event-bus#build", - "@stock-bot/shutdown#build" - ], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": [ + "@stock-bot/types#build", + "@stock-bot/config#build", + "@stock-bot/logger#build", + "@stock-bot/utils#build", + "@stock-bot/data-frame#build", + "@stock-bot/vector-engine#build", + "@stock-bot/mongodb-client#build", + "@stock-bot/event-bus#build", + "@stock-bot/shutdown#build" + ], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/apps/strategy-service/package.json b/apps/strategy-service/package.json index f7c8be2..2695f62 100644 --- a/apps/strategy-service/package.json +++ b/apps/strategy-service/package.json @@ -1,33 +1,33 @@ -{ - "name": "@stock-bot/strategy-service", - "version": "1.0.0", - "description": "Combined strategy execution and multi-mode backtesting service", - "main": "dist/index.js", - "type": "module", - "scripts": { - "devvvvv": "bun --watch src/index.ts", - "build": "bun build src/index.ts --outdir dist --target node", - "start": "bun dist/index.js", - "test": "bun test", "clean": "rm -rf dist", - "backtest": "bun src/cli/index.ts", - "optimize": "bun src/cli/index.ts optimize", - "cli": "bun src/cli/index.ts" - }, - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "@stock-bot/utils": "*", - "@stock-bot/event-bus": "*", - 
"@stock-bot/strategy-engine": "*", - "@stock-bot/vector-engine": "*", - "@stock-bot/data-frame": "*", - "@stock-bot/questdb-client": "*", - "hono": "^4.0.0", - "commander": "^11.0.0" - }, - "devDependencies": { - "@types/node": "^20.0.0", - "typescript": "^5.0.0" - } -} +{ + "name": "@stock-bot/strategy-service", + "version": "1.0.0", + "description": "Combined strategy execution and multi-mode backtesting service", + "main": "dist/index.js", + "type": "module", + "scripts": { + "devvvvv": "bun --watch src/index.ts", + "build": "bun build src/index.ts --outdir dist --target node", + "start": "bun dist/index.js", + "test": "bun test", "clean": "rm -rf dist", + "backtest": "bun src/cli/index.ts", + "optimize": "bun src/cli/index.ts optimize", + "cli": "bun src/cli/index.ts" + }, + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "@stock-bot/utils": "*", + "@stock-bot/event-bus": "*", + "@stock-bot/strategy-engine": "*", + "@stock-bot/vector-engine": "*", + "@stock-bot/data-frame": "*", + "@stock-bot/questdb-client": "*", + "hono": "^4.0.0", + "commander": "^11.0.0" + }, + "devDependencies": { + "@types/node": "^20.0.0", + "typescript": "^5.0.0" + } +} diff --git a/apps/strategy-service/src/backtesting/modes/event-mode.ts b/apps/strategy-service/src/backtesting/modes/event-mode.ts index c9efdbd..38a1a01 100644 --- a/apps/strategy-service/src/backtesting/modes/event-mode.ts +++ b/apps/strategy-service/src/backtesting/modes/event-mode.ts @@ -1,75 +1,75 @@ -/** - * Event-Driven Backtesting Mode - * Processes data point by point with realistic order execution - */ -import { ExecutionMode, Order, OrderResult, MarketData } from '../../framework/execution-mode'; - -export interface BacktestConfig { - startDate: Date; - endDate: Date; - initialCapital: number; - slippageModel?: string; - commissionModel?: string; -} - -export class EventMode extends ExecutionMode { - name = 'event-driven'; - private simulationTime: Date; - 
private historicalData: Map = new Map(); - - constructor(private config: BacktestConfig) { - super(); - this.simulationTime = config.startDate; - } - - async executeOrder(order: Order): Promise { - this.logger.debug('Simulating order execution', { - orderId: order.id, - simulationTime: this.simulationTime - }); - - // TODO: Implement realistic order simulation - // Include slippage, commission, market impact - const simulatedResult: OrderResult = { - orderId: order.id, - symbol: order.symbol, - executedQuantity: order.quantity, - executedPrice: 100, // TODO: Get realistic price - commission: 1.0, // TODO: Calculate based on commission model - slippage: 0.01, // TODO: Calculate based on slippage model - timestamp: this.simulationTime, - executionTime: 50 // ms - }; - - return simulatedResult; - } - - getCurrentTime(): Date { - return this.simulationTime; - } - - async getMarketData(symbol: string): Promise { - const data = this.historicalData.get(symbol) || []; - const currentData = data.find(d => d.timestamp <= this.simulationTime); - - if (!currentData) { - throw new Error(`No market data available for ${symbol} at ${this.simulationTime}`); - } - - return currentData; - } - - async publishEvent(event: string, data: any): Promise { - // In-memory event bus for simulation - this.logger.debug('Publishing simulation event', { event, data }); - } - - // Simulation control methods - advanceTime(newTime: Date): void { - this.simulationTime = newTime; - } - - loadHistoricalData(symbol: string, data: MarketData[]): void { - this.historicalData.set(symbol, data); - } -} +/** + * Event-Driven Backtesting Mode + * Processes data point by point with realistic order execution + */ +import { ExecutionMode, Order, OrderResult, MarketData } from '../../framework/execution-mode'; + +export interface BacktestConfig { + startDate: Date; + endDate: Date; + initialCapital: number; + slippageModel?: string; + commissionModel?: string; +} + +export class EventMode extends ExecutionMode { 
+ name = 'event-driven'; + private simulationTime: Date; + private historicalData: Map = new Map(); + + constructor(private config: BacktestConfig) { + super(); + this.simulationTime = config.startDate; + } + + async executeOrder(order: Order): Promise { + this.logger.debug('Simulating order execution', { + orderId: order.id, + simulationTime: this.simulationTime + }); + + // TODO: Implement realistic order simulation + // Include slippage, commission, market impact + const simulatedResult: OrderResult = { + orderId: order.id, + symbol: order.symbol, + executedQuantity: order.quantity, + executedPrice: 100, // TODO: Get realistic price + commission: 1.0, // TODO: Calculate based on commission model + slippage: 0.01, // TODO: Calculate based on slippage model + timestamp: this.simulationTime, + executionTime: 50 // ms + }; + + return simulatedResult; + } + + getCurrentTime(): Date { + return this.simulationTime; + } + + async getMarketData(symbol: string): Promise { + const data = this.historicalData.get(symbol) || []; + const currentData = data.find(d => d.timestamp <= this.simulationTime); + + if (!currentData) { + throw new Error(`No market data available for ${symbol} at ${this.simulationTime}`); + } + + return currentData; + } + + async publishEvent(event: string, data: any): Promise { + // In-memory event bus for simulation + this.logger.debug('Publishing simulation event', { event, data }); + } + + // Simulation control methods + advanceTime(newTime: Date): void { + this.simulationTime = newTime; + } + + loadHistoricalData(symbol: string, data: MarketData[]): void { + this.historicalData.set(symbol, data); + } +} diff --git a/apps/strategy-service/src/backtesting/modes/hybrid-mode.ts b/apps/strategy-service/src/backtesting/modes/hybrid-mode.ts index bafdb6a..ff90e99 100644 --- a/apps/strategy-service/src/backtesting/modes/hybrid-mode.ts +++ b/apps/strategy-service/src/backtesting/modes/hybrid-mode.ts @@ -1,422 +1,422 @@ -import { getLogger } from 
'@stock-bot/logger'; -import { EventBus } from '@stock-bot/event-bus'; -import { VectorEngine, VectorizedBacktestResult } from '@stock-bot/vector-engine'; -import { DataFrame } from '@stock-bot/data-frame'; -import { ExecutionMode, BacktestContext, BacktestResult } from '../framework/execution-mode'; -import { EventMode } from './event-mode'; -import VectorizedMode from './vectorized-mode'; -import { create } from 'domain'; - -export interface HybridModeConfig { - vectorizedThreshold: number; // Switch to vectorized if data points > threshold - warmupPeriod: number; // Number of periods for initial vectorized calculation - eventDrivenRealtime: boolean; // Use event-driven for real-time portions - optimizeIndicators: boolean; // Pre-calculate indicators vectorized - batchSize: number; // Size of batches for hybrid processing -} - -export class HybridMode extends ExecutionMode { - private vectorEngine: VectorEngine; - private eventMode: EventMode; - private vectorizedMode: VectorizedMode; - private config: HybridModeConfig; - private precomputedIndicators: Map = new Map(); - private currentIndex: number = 0; - - constructor( - context: BacktestContext, - eventBus: EventBus, - config: HybridModeConfig = {} - ) { - super(context, eventBus); - - this.config = { - vectorizedThreshold: 50000, - warmupPeriod: 1000, - eventDrivenRealtime: true, - optimizeIndicators: true, - batchSize: 10000, - ...config - }; - - this.vectorEngine = new VectorEngine(); - this.eventMode = new EventMode(context, eventBus); - this.vectorizedMode = new VectorizedMode(context, eventBus); - - this.logger = getLogger('hybrid-mode'); - } - - async initialize(): Promise { - await super.initialize(); - - // Initialize both modes - await this.eventMode.initialize(); - await this.vectorizedMode.initialize(); - - this.logger.info('Hybrid mode initialized', { - backtestId: this.context.backtestId, - config: this.config - }); - } - - async execute(): Promise { - const startTime = Date.now(); - 
this.logger.info('Starting hybrid backtest execution'); - - try { - // Determine execution strategy based on data size - const dataSize = await this.estimateDataSize(); - - if (dataSize <= this.config.vectorizedThreshold) { - // Small dataset: use pure vectorized approach - this.logger.info('Using pure vectorized approach for small dataset', { dataSize }); - return await this.vectorizedMode.execute(); - } - - // Large dataset: use hybrid approach - this.logger.info('Using hybrid approach for large dataset', { dataSize }); - return await this.executeHybrid(startTime); - - } catch (error) { - this.logger.error('Hybrid backtest failed', { - error, - backtestId: this.context.backtestId - }); - - await this.eventBus.publishBacktestUpdate( - this.context.backtestId, - 0, - { status: 'failed', error: error.message } - ); - - throw error; - } - } - - private async executeHybrid(startTime: number): Promise { - // Phase 1: Vectorized warmup and indicator pre-computation - const warmupResult = await this.executeWarmupPhase(); - - // Phase 2: Event-driven processing with pre-computed indicators - const eventResult = await this.executeEventPhase(warmupResult); - - // Phase 3: Combine results - const combinedResult = this.combineResults(warmupResult, eventResult, startTime); - - await this.eventBus.publishBacktestUpdate( - this.context.backtestId, - 100, - { status: 'completed', result: combinedResult } - ); - - this.logger.info('Hybrid backtest completed', { - backtestId: this.context.backtestId, - duration: Date.now() - startTime, - totalTrades: combinedResult.trades.length, - warmupTrades: warmupResult.trades.length, - eventTrades: eventResult.trades.length - }); - - return combinedResult; - } - - private async executeWarmupPhase(): Promise { - this.logger.info('Executing vectorized warmup phase', { - warmupPeriod: this.config.warmupPeriod - }); - - // Load warmup data - const warmupData = await this.loadWarmupData(); - const dataFrame = this.createDataFrame(warmupData); - - 
// Pre-compute indicators for entire dataset if optimization is enabled - if (this.config.optimizeIndicators) { - await this.precomputeIndicators(dataFrame); - } - - // Run vectorized backtest on warmup period - const strategyCode = this.generateStrategyCode(); - const vectorResult = await this.vectorEngine.executeVectorizedStrategy( - dataFrame.head(this.config.warmupPeriod), - strategyCode - ); - - // Convert to standard format - return this.convertVectorizedResult(vectorResult, Date.now()); - } - - private async executeEventPhase(warmupResult: BacktestResult): Promise { - this.logger.info('Executing event-driven phase'); - - // Set up event mode with warmup context - this.currentIndex = this.config.warmupPeriod; - - // Create modified context for event phase - const eventContext: BacktestContext = { - ...this.context, - initialPortfolio: this.extractFinalPortfolio(warmupResult) - }; - - // Execute event-driven backtest for remaining data - const eventMode = new EventMode(eventContext, this.eventBus); - await eventMode.initialize(); - - // Override indicator calculations to use pre-computed values - if (this.config.optimizeIndicators) { - this.overrideIndicatorCalculations(eventMode); - } - - return await eventMode.execute(); - } - - private async precomputeIndicators(dataFrame: DataFrame): Promise { - this.logger.info('Pre-computing indicators vectorized'); - - const close = dataFrame.getColumn('close'); - const high = dataFrame.getColumn('high'); - const low = dataFrame.getColumn('low'); - - // Import technical indicators from vector engine - const { TechnicalIndicators } = await import('@stock-bot/vector-engine'); - - // Pre-compute common indicators - this.precomputedIndicators.set('sma_20', TechnicalIndicators.sma(close, 20)); - this.precomputedIndicators.set('sma_50', TechnicalIndicators.sma(close, 50)); - this.precomputedIndicators.set('ema_12', TechnicalIndicators.ema(close, 12)); - this.precomputedIndicators.set('ema_26', TechnicalIndicators.ema(close, 
26)); - this.precomputedIndicators.set('rsi', TechnicalIndicators.rsi(close)); - this.precomputedIndicators.set('atr', TechnicalIndicators.atr(high, low, close)); - - const macd = TechnicalIndicators.macd(close); - this.precomputedIndicators.set('macd', macd.macd); - this.precomputedIndicators.set('macd_signal', macd.signal); - this.precomputedIndicators.set('macd_histogram', macd.histogram); - - const bb = TechnicalIndicators.bollingerBands(close); - this.precomputedIndicators.set('bb_upper', bb.upper); - this.precomputedIndicators.set('bb_middle', bb.middle); - this.precomputedIndicators.set('bb_lower', bb.lower); - - this.logger.info('Indicators pre-computed', { - indicators: Array.from(this.precomputedIndicators.keys()) - }); - } - - private overrideIndicatorCalculations(eventMode: EventMode): void { - // Override the event mode's indicator calculations to use pre-computed values - // This is a simplified approach - in production you'd want a more sophisticated interface - const originalCalculateIndicators = (eventMode as any).calculateIndicators; - - (eventMode as any).calculateIndicators = (symbol: string, index: number) => { - const indicators: Record = {}; - - for (const [name, values] of this.precomputedIndicators.entries()) { - if (index < values.length) { - indicators[name] = values[index]; - } - } - - return indicators; - }; - } - - private async estimateDataSize(): Promise { - // Estimate the number of data points for the backtest period - const startTime = new Date(this.context.startDate).getTime(); - const endTime = new Date(this.context.endDate).getTime(); - const timeRange = endTime - startTime; - - // Assume 1-minute intervals (60000ms) - const estimatedPoints = Math.floor(timeRange / 60000); - - this.logger.debug('Estimated data size', { - timeRange, - estimatedPoints, - threshold: this.config.vectorizedThreshold - }); - - return estimatedPoints; - } - - private async loadWarmupData(): Promise { - // Load historical data for warmup phase - // 
This should load more data than just the warmup period for indicator calculations - const data = []; - const startTime = new Date(this.context.startDate).getTime(); - const warmupEndTime = startTime + (this.config.warmupPeriod * 60000); - - // Add extra lookback for indicator calculations - const lookbackTime = startTime - (200 * 60000); // 200 periods lookback - - for (let timestamp = lookbackTime; timestamp <= warmupEndTime; timestamp += 60000) { - const basePrice = 100 + Math.sin(timestamp / 1000000) * 10; - const volatility = 0.02; - - const open = basePrice + (Math.random() - 0.5) * volatility * basePrice; - const close = open + (Math.random() - 0.5) * volatility * basePrice; - const high = Math.max(open, close) + Math.random() * volatility * basePrice; - const low = Math.min(open, close) - Math.random() * volatility * basePrice; - const volume = Math.floor(Math.random() * 10000) + 1000; - - data.push({ - timestamp, - symbol: this.context.symbol, - open, - high, - low, - close, - volume - }); - } - - return data; - } - - private createDataFrame(data: any[]): DataFrame { - return new DataFrame(data, { - columns: ['timestamp', 'symbol', 'open', 'high', 'low', 'close', 'volume'], - dtypes: { - timestamp: 'number', - symbol: 'string', - open: 'number', - high: 'number', - low: 'number', - close: 'number', - volume: 'number' - } - }); - } - - private generateStrategyCode(): string { - // Generate strategy code based on context - const strategy = this.context.strategy; - - if (strategy.type === 'sma_crossover') { - return 'sma_crossover'; - } - - return strategy.code || 'sma_crossover'; - } - - private convertVectorizedResult(vectorResult: VectorizedBacktestResult, startTime: number): BacktestResult { - return { - backtestId: this.context.backtestId, - strategy: this.context.strategy, - symbol: this.context.symbol, - startDate: this.context.startDate, - endDate: this.context.endDate, - mode: 'hybrid-vectorized', - duration: Date.now() - startTime, - trades: 
vectorResult.trades.map(trade => ({ - id: `trade_${trade.entryIndex}_${trade.exitIndex}`, - symbol: this.context.symbol, - side: trade.side, - entryTime: vectorResult.timestamps[trade.entryIndex], - exitTime: vectorResult.timestamps[trade.exitIndex], - entryPrice: trade.entryPrice, - exitPrice: trade.exitPrice, - quantity: trade.quantity, - pnl: trade.pnl, - commission: 0, - slippage: 0 - })), - performance: { - totalReturn: vectorResult.metrics.totalReturns, - sharpeRatio: vectorResult.metrics.sharpeRatio, - maxDrawdown: vectorResult.metrics.maxDrawdown, - winRate: vectorResult.metrics.winRate, - profitFactor: vectorResult.metrics.profitFactor, - totalTrades: vectorResult.metrics.totalTrades, - winningTrades: vectorResult.trades.filter(t => t.pnl > 0).length, - losingTrades: vectorResult.trades.filter(t => t.pnl <= 0).length, - avgTrade: vectorResult.metrics.avgTrade, - avgWin: vectorResult.trades.filter(t => t.pnl > 0) - .reduce((sum, t) => sum + t.pnl, 0) / vectorResult.trades.filter(t => t.pnl > 0).length || 0, - avgLoss: vectorResult.trades.filter(t => t.pnl <= 0) - .reduce((sum, t) => sum + t.pnl, 0) / vectorResult.trades.filter(t => t.pnl <= 0).length || 0, - largestWin: Math.max(...vectorResult.trades.map(t => t.pnl), 0), - largestLoss: Math.min(...vectorResult.trades.map(t => t.pnl), 0) - }, - equity: vectorResult.equity, - drawdown: vectorResult.metrics.drawdown, - metadata: { - mode: 'hybrid-vectorized', - dataPoints: vectorResult.timestamps.length, - signals: Object.keys(vectorResult.signals), - optimizations: ['vectorized_warmup', 'precomputed_indicators'] - } - }; - } - - private extractFinalPortfolio(warmupResult: BacktestResult): any { - // Extract the final portfolio state from warmup phase - const finalEquity = warmupResult.equity[warmupResult.equity.length - 1] || 10000; - - return { - cash: finalEquity, - positions: [], // Simplified - in production would track actual positions - equity: finalEquity - }; - } - - private 
combineResults(warmupResult: BacktestResult, eventResult: BacktestResult, startTime: number): BacktestResult { - // Combine results from both phases - const combinedTrades = [...warmupResult.trades, ...eventResult.trades]; - const combinedEquity = [...warmupResult.equity, ...eventResult.equity]; - const combinedDrawdown = [...(warmupResult.drawdown || []), ...(eventResult.drawdown || [])]; - - // Recalculate combined performance metrics - const totalPnL = combinedTrades.reduce((sum, trade) => sum + trade.pnl, 0); - const winningTrades = combinedTrades.filter(t => t.pnl > 0); - const losingTrades = combinedTrades.filter(t => t.pnl <= 0); - - const grossProfit = winningTrades.reduce((sum, t) => sum + t.pnl, 0); - const grossLoss = Math.abs(losingTrades.reduce((sum, t) => sum + t.pnl, 0)); - - return { - backtestId: this.context.backtestId, - strategy: this.context.strategy, - symbol: this.context.symbol, - startDate: this.context.startDate, - endDate: this.context.endDate, - mode: 'hybrid', - duration: Date.now() - startTime, - trades: combinedTrades, - performance: { - totalReturn: (combinedEquity[combinedEquity.length - 1] - combinedEquity[0]) / combinedEquity[0], - sharpeRatio: eventResult.performance.sharpeRatio, // Use event result for more accurate calculation - maxDrawdown: Math.max(...combinedDrawdown), - winRate: winningTrades.length / combinedTrades.length, - profitFactor: grossLoss !== 0 ? 
grossProfit / grossLoss : Infinity, - totalTrades: combinedTrades.length, - winningTrades: winningTrades.length, - losingTrades: losingTrades.length, - avgTrade: totalPnL / combinedTrades.length, - avgWin: grossProfit / winningTrades.length || 0, - avgLoss: grossLoss / losingTrades.length || 0, - largestWin: Math.max(...combinedTrades.map(t => t.pnl), 0), - largestLoss: Math.min(...combinedTrades.map(t => t.pnl), 0) - }, - equity: combinedEquity, - drawdown: combinedDrawdown, - metadata: { - mode: 'hybrid', - phases: ['vectorized-warmup', 'event-driven'], - warmupPeriod: this.config.warmupPeriod, - optimizations: ['precomputed_indicators', 'hybrid_execution'], - warmupTrades: warmupResult.trades.length, - eventTrades: eventResult.trades.length - } - }; - } - - async cleanup(): Promise { - await super.cleanup(); - await this.eventMode.cleanup(); - await this.vectorizedMode.cleanup(); - this.precomputedIndicators.clear(); - this.logger.info('Hybrid mode cleanup completed'); - } -} - -export default HybridMode; +import { getLogger } from '@stock-bot/logger'; +import { EventBus } from '@stock-bot/event-bus'; +import { VectorEngine, VectorizedBacktestResult } from '@stock-bot/vector-engine'; +import { DataFrame } from '@stock-bot/data-frame'; +import { ExecutionMode, BacktestContext, BacktestResult } from '../framework/execution-mode'; +import { EventMode } from './event-mode'; +import VectorizedMode from './vectorized-mode'; +import { create } from 'domain'; + +export interface HybridModeConfig { + vectorizedThreshold: number; // Switch to vectorized if data points > threshold + warmupPeriod: number; // Number of periods for initial vectorized calculation + eventDrivenRealtime: boolean; // Use event-driven for real-time portions + optimizeIndicators: boolean; // Pre-calculate indicators vectorized + batchSize: number; // Size of batches for hybrid processing +} + +export class HybridMode extends ExecutionMode { + private vectorEngine: VectorEngine; + private eventMode: 
EventMode; + private vectorizedMode: VectorizedMode; + private config: HybridModeConfig; + private precomputedIndicators: Map = new Map(); + private currentIndex: number = 0; + + constructor( + context: BacktestContext, + eventBus: EventBus, + config: HybridModeConfig = {} + ) { + super(context, eventBus); + + this.config = { + vectorizedThreshold: 50000, + warmupPeriod: 1000, + eventDrivenRealtime: true, + optimizeIndicators: true, + batchSize: 10000, + ...config + }; + + this.vectorEngine = new VectorEngine(); + this.eventMode = new EventMode(context, eventBus); + this.vectorizedMode = new VectorizedMode(context, eventBus); + + this.logger = getLogger('hybrid-mode'); + } + + async initialize(): Promise { + await super.initialize(); + + // Initialize both modes + await this.eventMode.initialize(); + await this.vectorizedMode.initialize(); + + this.logger.info('Hybrid mode initialized', { + backtestId: this.context.backtestId, + config: this.config + }); + } + + async execute(): Promise { + const startTime = Date.now(); + this.logger.info('Starting hybrid backtest execution'); + + try { + // Determine execution strategy based on data size + const dataSize = await this.estimateDataSize(); + + if (dataSize <= this.config.vectorizedThreshold) { + // Small dataset: use pure vectorized approach + this.logger.info('Using pure vectorized approach for small dataset', { dataSize }); + return await this.vectorizedMode.execute(); + } + + // Large dataset: use hybrid approach + this.logger.info('Using hybrid approach for large dataset', { dataSize }); + return await this.executeHybrid(startTime); + + } catch (error) { + this.logger.error('Hybrid backtest failed', { + error, + backtestId: this.context.backtestId + }); + + await this.eventBus.publishBacktestUpdate( + this.context.backtestId, + 0, + { status: 'failed', error: error.message } + ); + + throw error; + } + } + + private async executeHybrid(startTime: number): Promise { + // Phase 1: Vectorized warmup and indicator 
pre-computation + const warmupResult = await this.executeWarmupPhase(); + + // Phase 2: Event-driven processing with pre-computed indicators + const eventResult = await this.executeEventPhase(warmupResult); + + // Phase 3: Combine results + const combinedResult = this.combineResults(warmupResult, eventResult, startTime); + + await this.eventBus.publishBacktestUpdate( + this.context.backtestId, + 100, + { status: 'completed', result: combinedResult } + ); + + this.logger.info('Hybrid backtest completed', { + backtestId: this.context.backtestId, + duration: Date.now() - startTime, + totalTrades: combinedResult.trades.length, + warmupTrades: warmupResult.trades.length, + eventTrades: eventResult.trades.length + }); + + return combinedResult; + } + + private async executeWarmupPhase(): Promise { + this.logger.info('Executing vectorized warmup phase', { + warmupPeriod: this.config.warmupPeriod + }); + + // Load warmup data + const warmupData = await this.loadWarmupData(); + const dataFrame = this.createDataFrame(warmupData); + + // Pre-compute indicators for entire dataset if optimization is enabled + if (this.config.optimizeIndicators) { + await this.precomputeIndicators(dataFrame); + } + + // Run vectorized backtest on warmup period + const strategyCode = this.generateStrategyCode(); + const vectorResult = await this.vectorEngine.executeVectorizedStrategy( + dataFrame.head(this.config.warmupPeriod), + strategyCode + ); + + // Convert to standard format + return this.convertVectorizedResult(vectorResult, Date.now()); + } + + private async executeEventPhase(warmupResult: BacktestResult): Promise { + this.logger.info('Executing event-driven phase'); + + // Set up event mode with warmup context + this.currentIndex = this.config.warmupPeriod; + + // Create modified context for event phase + const eventContext: BacktestContext = { + ...this.context, + initialPortfolio: this.extractFinalPortfolio(warmupResult) + }; + + // Execute event-driven backtest for remaining data + 
const eventMode = new EventMode(eventContext, this.eventBus); + await eventMode.initialize(); + + // Override indicator calculations to use pre-computed values + if (this.config.optimizeIndicators) { + this.overrideIndicatorCalculations(eventMode); + } + + return await eventMode.execute(); + } + + private async precomputeIndicators(dataFrame: DataFrame): Promise { + this.logger.info('Pre-computing indicators vectorized'); + + const close = dataFrame.getColumn('close'); + const high = dataFrame.getColumn('high'); + const low = dataFrame.getColumn('low'); + + // Import technical indicators from vector engine + const { TechnicalIndicators } = await import('@stock-bot/vector-engine'); + + // Pre-compute common indicators + this.precomputedIndicators.set('sma_20', TechnicalIndicators.sma(close, 20)); + this.precomputedIndicators.set('sma_50', TechnicalIndicators.sma(close, 50)); + this.precomputedIndicators.set('ema_12', TechnicalIndicators.ema(close, 12)); + this.precomputedIndicators.set('ema_26', TechnicalIndicators.ema(close, 26)); + this.precomputedIndicators.set('rsi', TechnicalIndicators.rsi(close)); + this.precomputedIndicators.set('atr', TechnicalIndicators.atr(high, low, close)); + + const macd = TechnicalIndicators.macd(close); + this.precomputedIndicators.set('macd', macd.macd); + this.precomputedIndicators.set('macd_signal', macd.signal); + this.precomputedIndicators.set('macd_histogram', macd.histogram); + + const bb = TechnicalIndicators.bollingerBands(close); + this.precomputedIndicators.set('bb_upper', bb.upper); + this.precomputedIndicators.set('bb_middle', bb.middle); + this.precomputedIndicators.set('bb_lower', bb.lower); + + this.logger.info('Indicators pre-computed', { + indicators: Array.from(this.precomputedIndicators.keys()) + }); + } + + private overrideIndicatorCalculations(eventMode: EventMode): void { + // Override the event mode's indicator calculations to use pre-computed values + // This is a simplified approach - in production you'd want 
a more sophisticated interface + const originalCalculateIndicators = (eventMode as any).calculateIndicators; + + (eventMode as any).calculateIndicators = (symbol: string, index: number) => { + const indicators: Record = {}; + + for (const [name, values] of this.precomputedIndicators.entries()) { + if (index < values.length) { + indicators[name] = values[index]; + } + } + + return indicators; + }; + } + + private async estimateDataSize(): Promise { + // Estimate the number of data points for the backtest period + const startTime = new Date(this.context.startDate).getTime(); + const endTime = new Date(this.context.endDate).getTime(); + const timeRange = endTime - startTime; + + // Assume 1-minute intervals (60000ms) + const estimatedPoints = Math.floor(timeRange / 60000); + + this.logger.debug('Estimated data size', { + timeRange, + estimatedPoints, + threshold: this.config.vectorizedThreshold + }); + + return estimatedPoints; + } + + private async loadWarmupData(): Promise { + // Load historical data for warmup phase + // This should load more data than just the warmup period for indicator calculations + const data = []; + const startTime = new Date(this.context.startDate).getTime(); + const warmupEndTime = startTime + (this.config.warmupPeriod * 60000); + + // Add extra lookback for indicator calculations + const lookbackTime = startTime - (200 * 60000); // 200 periods lookback + + for (let timestamp = lookbackTime; timestamp <= warmupEndTime; timestamp += 60000) { + const basePrice = 100 + Math.sin(timestamp / 1000000) * 10; + const volatility = 0.02; + + const open = basePrice + (Math.random() - 0.5) * volatility * basePrice; + const close = open + (Math.random() - 0.5) * volatility * basePrice; + const high = Math.max(open, close) + Math.random() * volatility * basePrice; + const low = Math.min(open, close) - Math.random() * volatility * basePrice; + const volume = Math.floor(Math.random() * 10000) + 1000; + + data.push({ + timestamp, + symbol: 
this.context.symbol, + open, + high, + low, + close, + volume + }); + } + + return data; + } + + private createDataFrame(data: any[]): DataFrame { + return new DataFrame(data, { + columns: ['timestamp', 'symbol', 'open', 'high', 'low', 'close', 'volume'], + dtypes: { + timestamp: 'number', + symbol: 'string', + open: 'number', + high: 'number', + low: 'number', + close: 'number', + volume: 'number' + } + }); + } + + private generateStrategyCode(): string { + // Generate strategy code based on context + const strategy = this.context.strategy; + + if (strategy.type === 'sma_crossover') { + return 'sma_crossover'; + } + + return strategy.code || 'sma_crossover'; + } + + private convertVectorizedResult(vectorResult: VectorizedBacktestResult, startTime: number): BacktestResult { + return { + backtestId: this.context.backtestId, + strategy: this.context.strategy, + symbol: this.context.symbol, + startDate: this.context.startDate, + endDate: this.context.endDate, + mode: 'hybrid-vectorized', + duration: Date.now() - startTime, + trades: vectorResult.trades.map(trade => ({ + id: `trade_${trade.entryIndex}_${trade.exitIndex}`, + symbol: this.context.symbol, + side: trade.side, + entryTime: vectorResult.timestamps[trade.entryIndex], + exitTime: vectorResult.timestamps[trade.exitIndex], + entryPrice: trade.entryPrice, + exitPrice: trade.exitPrice, + quantity: trade.quantity, + pnl: trade.pnl, + commission: 0, + slippage: 0 + })), + performance: { + totalReturn: vectorResult.metrics.totalReturns, + sharpeRatio: vectorResult.metrics.sharpeRatio, + maxDrawdown: vectorResult.metrics.maxDrawdown, + winRate: vectorResult.metrics.winRate, + profitFactor: vectorResult.metrics.profitFactor, + totalTrades: vectorResult.metrics.totalTrades, + winningTrades: vectorResult.trades.filter(t => t.pnl > 0).length, + losingTrades: vectorResult.trades.filter(t => t.pnl <= 0).length, + avgTrade: vectorResult.metrics.avgTrade, + avgWin: vectorResult.trades.filter(t => t.pnl > 0) + .reduce((sum, t) 
=> sum + t.pnl, 0) / vectorResult.trades.filter(t => t.pnl > 0).length || 0, + avgLoss: vectorResult.trades.filter(t => t.pnl <= 0) + .reduce((sum, t) => sum + t.pnl, 0) / vectorResult.trades.filter(t => t.pnl <= 0).length || 0, + largestWin: Math.max(...vectorResult.trades.map(t => t.pnl), 0), + largestLoss: Math.min(...vectorResult.trades.map(t => t.pnl), 0) + }, + equity: vectorResult.equity, + drawdown: vectorResult.metrics.drawdown, + metadata: { + mode: 'hybrid-vectorized', + dataPoints: vectorResult.timestamps.length, + signals: Object.keys(vectorResult.signals), + optimizations: ['vectorized_warmup', 'precomputed_indicators'] + } + }; + } + + private extractFinalPortfolio(warmupResult: BacktestResult): any { + // Extract the final portfolio state from warmup phase + const finalEquity = warmupResult.equity[warmupResult.equity.length - 1] || 10000; + + return { + cash: finalEquity, + positions: [], // Simplified - in production would track actual positions + equity: finalEquity + }; + } + + private combineResults(warmupResult: BacktestResult, eventResult: BacktestResult, startTime: number): BacktestResult { + // Combine results from both phases + const combinedTrades = [...warmupResult.trades, ...eventResult.trades]; + const combinedEquity = [...warmupResult.equity, ...eventResult.equity]; + const combinedDrawdown = [...(warmupResult.drawdown || []), ...(eventResult.drawdown || [])]; + + // Recalculate combined performance metrics + const totalPnL = combinedTrades.reduce((sum, trade) => sum + trade.pnl, 0); + const winningTrades = combinedTrades.filter(t => t.pnl > 0); + const losingTrades = combinedTrades.filter(t => t.pnl <= 0); + + const grossProfit = winningTrades.reduce((sum, t) => sum + t.pnl, 0); + const grossLoss = Math.abs(losingTrades.reduce((sum, t) => sum + t.pnl, 0)); + + return { + backtestId: this.context.backtestId, + strategy: this.context.strategy, + symbol: this.context.symbol, + startDate: this.context.startDate, + endDate: 
this.context.endDate, + mode: 'hybrid', + duration: Date.now() - startTime, + trades: combinedTrades, + performance: { + totalReturn: (combinedEquity[combinedEquity.length - 1] - combinedEquity[0]) / combinedEquity[0], + sharpeRatio: eventResult.performance.sharpeRatio, // Use event result for more accurate calculation + maxDrawdown: Math.max(...combinedDrawdown), + winRate: winningTrades.length / combinedTrades.length, + profitFactor: grossLoss !== 0 ? grossProfit / grossLoss : Infinity, + totalTrades: combinedTrades.length, + winningTrades: winningTrades.length, + losingTrades: losingTrades.length, + avgTrade: totalPnL / combinedTrades.length, + avgWin: grossProfit / winningTrades.length || 0, + avgLoss: grossLoss / losingTrades.length || 0, + largestWin: Math.max(...combinedTrades.map(t => t.pnl), 0), + largestLoss: Math.min(...combinedTrades.map(t => t.pnl), 0) + }, + equity: combinedEquity, + drawdown: combinedDrawdown, + metadata: { + mode: 'hybrid', + phases: ['vectorized-warmup', 'event-driven'], + warmupPeriod: this.config.warmupPeriod, + optimizations: ['precomputed_indicators', 'hybrid_execution'], + warmupTrades: warmupResult.trades.length, + eventTrades: eventResult.trades.length + } + }; + } + + async cleanup(): Promise { + await super.cleanup(); + await this.eventMode.cleanup(); + await this.vectorizedMode.cleanup(); + this.precomputedIndicators.clear(); + this.logger.info('Hybrid mode cleanup completed'); + } +} + +export default HybridMode; diff --git a/apps/strategy-service/src/backtesting/modes/live-mode.ts b/apps/strategy-service/src/backtesting/modes/live-mode.ts index fc6ad1d..9aad81e 100644 --- a/apps/strategy-service/src/backtesting/modes/live-mode.ts +++ b/apps/strategy-service/src/backtesting/modes/live-mode.ts @@ -1,31 +1,31 @@ -/** - * Live Trading Mode - * Executes orders through real brokers - */ -import { ExecutionMode, Order, OrderResult, MarketData } from '../../framework/execution-mode'; - -export class LiveMode extends 
ExecutionMode { - name = 'live'; - - async executeOrder(order: Order): Promise { - this.logger.info('Executing live order', { orderId: order.id }); - - // TODO: Implement real broker integration - // This will connect to actual brokerage APIs - throw new Error('Live broker integration not implemented yet'); - } - - getCurrentTime(): Date { - return new Date(); // Real time - } - - async getMarketData(symbol: string): Promise { - // TODO: Get live market data - throw new Error('Live market data fetching not implemented yet'); - } - - async publishEvent(event: string, data: any): Promise { - // TODO: Publish to real event bus (Dragonfly) - this.logger.debug('Publishing event', { event, data }); - } -} +/** + * Live Trading Mode + * Executes orders through real brokers + */ +import { ExecutionMode, Order, OrderResult, MarketData } from '../../framework/execution-mode'; + +export class LiveMode extends ExecutionMode { + name = 'live'; + + async executeOrder(order: Order): Promise { + this.logger.info('Executing live order', { orderId: order.id }); + + // TODO: Implement real broker integration + // This will connect to actual brokerage APIs + throw new Error('Live broker integration not implemented yet'); + } + + getCurrentTime(): Date { + return new Date(); // Real time + } + + async getMarketData(symbol: string): Promise { + // TODO: Get live market data + throw new Error('Live market data fetching not implemented yet'); + } + + async publishEvent(event: string, data: any): Promise { + // TODO: Publish to real event bus (Dragonfly) + this.logger.debug('Publishing event', { event, data }); + } +} diff --git a/apps/strategy-service/src/backtesting/modes/vectorized-mode.ts b/apps/strategy-service/src/backtesting/modes/vectorized-mode.ts index 73bd639..9529b9a 100644 --- a/apps/strategy-service/src/backtesting/modes/vectorized-mode.ts +++ b/apps/strategy-service/src/backtesting/modes/vectorized-mode.ts @@ -1,239 +1,239 @@ -import { getLogger } from '@stock-bot/logger'; 
-import { EventBus } from '@stock-bot/event-bus'; -import { VectorEngine, VectorizedBacktestResult } from '@stock-bot/vector-engine'; -import { DataFrame } from '@stock-bot/data-frame'; -import { ExecutionMode, BacktestContext, BacktestResult } from '../framework/execution-mode'; - -export interface VectorizedModeConfig { - batchSize?: number; - enableOptimization?: boolean; - parallelProcessing?: boolean; -} - -export class VectorizedMode extends ExecutionMode { - private vectorEngine: VectorEngine; - private config: VectorizedModeConfig; - private logger = getLogger('vectorized-mode'); - - constructor( - context: BacktestContext, - eventBus: EventBus, - config: VectorizedModeConfig = {} - ) { - super(context, eventBus); - this.vectorEngine = new VectorEngine(); - this.config = { - batchSize: 10000, - enableOptimization: true, - parallelProcessing: true, - ...config - }; - } - - async initialize(): Promise { - await super.initialize(); - this.logger.info('Vectorized mode initialized', { - backtestId: this.context.backtestId, - config: this.config - }); - } - - async execute(): Promise { - const startTime = Date.now(); - this.logger.info('Starting vectorized backtest execution'); - - try { - // Load all data at once for vectorized processing - const data = await this.loadHistoricalData(); - - // Convert to DataFrame format - const dataFrame = this.createDataFrame(data); - - // Execute vectorized strategy - const strategyCode = this.generateStrategyCode(); - const vectorResult = await this.vectorEngine.executeVectorizedStrategy( - dataFrame, - strategyCode - ); - - // Convert to standard backtest result format - const result = this.convertVectorizedResult(vectorResult, startTime); - - // Emit completion event - await this.eventBus.publishBacktestUpdate( - this.context.backtestId, - 100, - { status: 'completed', result } - ); - - this.logger.info('Vectorized backtest completed', { - backtestId: this.context.backtestId, - duration: Date.now() - startTime, - 
totalTrades: result.trades.length - }); - - return result; - - } catch (error) { - this.logger.error('Vectorized backtest failed', { - error, - backtestId: this.context.backtestId - }); - - await this.eventBus.publishBacktestUpdate( - this.context.backtestId, - 0, - { status: 'failed', error: error.message } - ); - - throw error; - } - } - - private async loadHistoricalData(): Promise { - // Load all historical data at once - // This is much more efficient than loading tick by tick - const data = []; - - // Simulate loading data (in production, this would be a bulk database query) - const startTime = new Date(this.context.startDate).getTime(); - const endTime = new Date(this.context.endDate).getTime(); - const interval = 60000; // 1 minute intervals - - for (let timestamp = startTime; timestamp <= endTime; timestamp += interval) { - // Simulate OHLCV data - const basePrice = 100 + Math.sin(timestamp / 1000000) * 10; - const volatility = 0.02; - - const open = basePrice + (Math.random() - 0.5) * volatility * basePrice; - const close = open + (Math.random() - 0.5) * volatility * basePrice; - const high = Math.max(open, close) + Math.random() * volatility * basePrice; - const low = Math.min(open, close) - Math.random() * volatility * basePrice; - const volume = Math.floor(Math.random() * 10000) + 1000; - - data.push({ - timestamp, - symbol: this.context.symbol, - open, - high, - low, - close, - volume - }); - } - - return data; - } - - private createDataFrame(data: any[]): DataFrame { - return new DataFrame(data, { - columns: ['timestamp', 'symbol', 'open', 'high', 'low', 'close', 'volume'], - dtypes: { - timestamp: 'number', - symbol: 'string', - open: 'number', - high: 'number', - low: 'number', - close: 'number', - volume: 'number' - } - }); - } - - private generateStrategyCode(): string { - // Convert strategy configuration to vectorized strategy code - // This is a simplified example - in production you'd have a more sophisticated compiler - const strategy = 
this.context.strategy; - - if (strategy.type === 'sma_crossover') { - return 'sma_crossover'; - } - - // Add more strategy types as needed - return strategy.code || 'sma_crossover'; - } - - private convertVectorizedResult( - vectorResult: VectorizedBacktestResult, - startTime: number - ): BacktestResult { - return { - backtestId: this.context.backtestId, - strategy: this.context.strategy, - symbol: this.context.symbol, - startDate: this.context.startDate, - endDate: this.context.endDate, - mode: 'vectorized', - duration: Date.now() - startTime, - trades: vectorResult.trades.map(trade => ({ - id: `trade_${trade.entryIndex}_${trade.exitIndex}`, - symbol: this.context.symbol, - side: trade.side, - entryTime: vectorResult.timestamps[trade.entryIndex], - exitTime: vectorResult.timestamps[trade.exitIndex], - entryPrice: trade.entryPrice, - exitPrice: trade.exitPrice, - quantity: trade.quantity, - pnl: trade.pnl, - commission: 0, // Simplified - slippage: 0 - })), - performance: { - totalReturn: vectorResult.metrics.totalReturns, - sharpeRatio: vectorResult.metrics.sharpeRatio, - maxDrawdown: vectorResult.metrics.maxDrawdown, - winRate: vectorResult.metrics.winRate, - profitFactor: vectorResult.metrics.profitFactor, - totalTrades: vectorResult.metrics.totalTrades, - winningTrades: vectorResult.trades.filter(t => t.pnl > 0).length, - losingTrades: vectorResult.trades.filter(t => t.pnl <= 0).length, - avgTrade: vectorResult.metrics.avgTrade, - avgWin: vectorResult.trades.filter(t => t.pnl > 0) - .reduce((sum, t) => sum + t.pnl, 0) / vectorResult.trades.filter(t => t.pnl > 0).length || 0, - avgLoss: vectorResult.trades.filter(t => t.pnl <= 0) - .reduce((sum, t) => sum + t.pnl, 0) / vectorResult.trades.filter(t => t.pnl <= 0).length || 0, - largestWin: Math.max(...vectorResult.trades.map(t => t.pnl), 0), - largestLoss: Math.min(...vectorResult.trades.map(t => t.pnl), 0) - }, - equity: vectorResult.equity, - drawdown: vectorResult.metrics.drawdown, - metadata: { - mode: 
'vectorized', - dataPoints: vectorResult.timestamps.length, - signals: Object.keys(vectorResult.signals), - optimizations: this.config.enableOptimization ? ['vectorized_computation'] : [] - } - }; - } - - async cleanup(): Promise { - await super.cleanup(); - this.logger.info('Vectorized mode cleanup completed'); - } - - // Batch processing capabilities - async batchBacktest(strategies: Array<{ id: string; config: any }>): Promise> { - this.logger.info('Starting batch vectorized backtest', { - strategiesCount: strategies.length - }); - - const data = await this.loadHistoricalData(); - const dataFrame = this.createDataFrame(data); - - const strategyConfigs = strategies.map(s => ({ - id: s.id, - code: this.generateStrategyCode() - })); - - const batchResults = await this.vectorEngine.batchBacktest(dataFrame, strategyConfigs); - const results: Record = {}; - - for (const [strategyId, vectorResult] of Object.entries(batchResults)) { - results[strategyId] = this.convertVectorizedResult(vectorResult, Date.now()); - } - - return results; - } -} - -export default VectorizedMode; +import { getLogger } from '@stock-bot/logger'; +import { EventBus } from '@stock-bot/event-bus'; +import { VectorEngine, VectorizedBacktestResult } from '@stock-bot/vector-engine'; +import { DataFrame } from '@stock-bot/data-frame'; +import { ExecutionMode, BacktestContext, BacktestResult } from '../framework/execution-mode'; + +export interface VectorizedModeConfig { + batchSize?: number; + enableOptimization?: boolean; + parallelProcessing?: boolean; +} + +export class VectorizedMode extends ExecutionMode { + private vectorEngine: VectorEngine; + private config: VectorizedModeConfig; + private logger = getLogger('vectorized-mode'); + + constructor( + context: BacktestContext, + eventBus: EventBus, + config: VectorizedModeConfig = {} + ) { + super(context, eventBus); + this.vectorEngine = new VectorEngine(); + this.config = { + batchSize: 10000, + enableOptimization: true, + parallelProcessing: 
true, + ...config + }; + } + + async initialize(): Promise { + await super.initialize(); + this.logger.info('Vectorized mode initialized', { + backtestId: this.context.backtestId, + config: this.config + }); + } + + async execute(): Promise { + const startTime = Date.now(); + this.logger.info('Starting vectorized backtest execution'); + + try { + // Load all data at once for vectorized processing + const data = await this.loadHistoricalData(); + + // Convert to DataFrame format + const dataFrame = this.createDataFrame(data); + + // Execute vectorized strategy + const strategyCode = this.generateStrategyCode(); + const vectorResult = await this.vectorEngine.executeVectorizedStrategy( + dataFrame, + strategyCode + ); + + // Convert to standard backtest result format + const result = this.convertVectorizedResult(vectorResult, startTime); + + // Emit completion event + await this.eventBus.publishBacktestUpdate( + this.context.backtestId, + 100, + { status: 'completed', result } + ); + + this.logger.info('Vectorized backtest completed', { + backtestId: this.context.backtestId, + duration: Date.now() - startTime, + totalTrades: result.trades.length + }); + + return result; + + } catch (error) { + this.logger.error('Vectorized backtest failed', { + error, + backtestId: this.context.backtestId + }); + + await this.eventBus.publishBacktestUpdate( + this.context.backtestId, + 0, + { status: 'failed', error: error.message } + ); + + throw error; + } + } + + private async loadHistoricalData(): Promise { + // Load all historical data at once + // This is much more efficient than loading tick by tick + const data = []; + + // Simulate loading data (in production, this would be a bulk database query) + const startTime = new Date(this.context.startDate).getTime(); + const endTime = new Date(this.context.endDate).getTime(); + const interval = 60000; // 1 minute intervals + + for (let timestamp = startTime; timestamp <= endTime; timestamp += interval) { + // Simulate OHLCV data + 
const basePrice = 100 + Math.sin(timestamp / 1000000) * 10; + const volatility = 0.02; + + const open = basePrice + (Math.random() - 0.5) * volatility * basePrice; + const close = open + (Math.random() - 0.5) * volatility * basePrice; + const high = Math.max(open, close) + Math.random() * volatility * basePrice; + const low = Math.min(open, close) - Math.random() * volatility * basePrice; + const volume = Math.floor(Math.random() * 10000) + 1000; + + data.push({ + timestamp, + symbol: this.context.symbol, + open, + high, + low, + close, + volume + }); + } + + return data; + } + + private createDataFrame(data: any[]): DataFrame { + return new DataFrame(data, { + columns: ['timestamp', 'symbol', 'open', 'high', 'low', 'close', 'volume'], + dtypes: { + timestamp: 'number', + symbol: 'string', + open: 'number', + high: 'number', + low: 'number', + close: 'number', + volume: 'number' + } + }); + } + + private generateStrategyCode(): string { + // Convert strategy configuration to vectorized strategy code + // This is a simplified example - in production you'd have a more sophisticated compiler + const strategy = this.context.strategy; + + if (strategy.type === 'sma_crossover') { + return 'sma_crossover'; + } + + // Add more strategy types as needed + return strategy.code || 'sma_crossover'; + } + + private convertVectorizedResult( + vectorResult: VectorizedBacktestResult, + startTime: number + ): BacktestResult { + return { + backtestId: this.context.backtestId, + strategy: this.context.strategy, + symbol: this.context.symbol, + startDate: this.context.startDate, + endDate: this.context.endDate, + mode: 'vectorized', + duration: Date.now() - startTime, + trades: vectorResult.trades.map(trade => ({ + id: `trade_${trade.entryIndex}_${trade.exitIndex}`, + symbol: this.context.symbol, + side: trade.side, + entryTime: vectorResult.timestamps[trade.entryIndex], + exitTime: vectorResult.timestamps[trade.exitIndex], + entryPrice: trade.entryPrice, + exitPrice: trade.exitPrice, 
+ quantity: trade.quantity, + pnl: trade.pnl, + commission: 0, // Simplified + slippage: 0 + })), + performance: { + totalReturn: vectorResult.metrics.totalReturns, + sharpeRatio: vectorResult.metrics.sharpeRatio, + maxDrawdown: vectorResult.metrics.maxDrawdown, + winRate: vectorResult.metrics.winRate, + profitFactor: vectorResult.metrics.profitFactor, + totalTrades: vectorResult.metrics.totalTrades, + winningTrades: vectorResult.trades.filter(t => t.pnl > 0).length, + losingTrades: vectorResult.trades.filter(t => t.pnl <= 0).length, + avgTrade: vectorResult.metrics.avgTrade, + avgWin: vectorResult.trades.filter(t => t.pnl > 0) + .reduce((sum, t) => sum + t.pnl, 0) / vectorResult.trades.filter(t => t.pnl > 0).length || 0, + avgLoss: vectorResult.trades.filter(t => t.pnl <= 0) + .reduce((sum, t) => sum + t.pnl, 0) / vectorResult.trades.filter(t => t.pnl <= 0).length || 0, + largestWin: Math.max(...vectorResult.trades.map(t => t.pnl), 0), + largestLoss: Math.min(...vectorResult.trades.map(t => t.pnl), 0) + }, + equity: vectorResult.equity, + drawdown: vectorResult.metrics.drawdown, + metadata: { + mode: 'vectorized', + dataPoints: vectorResult.timestamps.length, + signals: Object.keys(vectorResult.signals), + optimizations: this.config.enableOptimization ? 
['vectorized_computation'] : [] + } + }; + } + + async cleanup(): Promise { + await super.cleanup(); + this.logger.info('Vectorized mode cleanup completed'); + } + + // Batch processing capabilities + async batchBacktest(strategies: Array<{ id: string; config: any }>): Promise> { + this.logger.info('Starting batch vectorized backtest', { + strategiesCount: strategies.length + }); + + const data = await this.loadHistoricalData(); + const dataFrame = this.createDataFrame(data); + + const strategyConfigs = strategies.map(s => ({ + id: s.id, + code: this.generateStrategyCode() + })); + + const batchResults = await this.vectorEngine.batchBacktest(dataFrame, strategyConfigs); + const results: Record = {}; + + for (const [strategyId, vectorResult] of Object.entries(batchResults)) { + results[strategyId] = this.convertVectorizedResult(vectorResult, Date.now()); + } + + return results; + } +} + +export default VectorizedMode; diff --git a/apps/strategy-service/src/cli/index.ts b/apps/strategy-service/src/cli/index.ts index 128ad11..e84d9c4 100644 --- a/apps/strategy-service/src/cli/index.ts +++ b/apps/strategy-service/src/cli/index.ts @@ -1,285 +1,285 @@ -#!/usr/bin/env bun -/** - * Strategy Service CLI - * Command-line interface for running backtests and managing strategies - */ - -import { program } from 'commander'; -import { getLogger } from '@stock-bot/logger'; -import { createEventBus } from '@stock-bot/event-bus'; -import { BacktestContext } from '../framework/execution-mode'; -import { LiveMode } from '../backtesting/modes/live-mode'; -import { EventMode } from '../backtesting/modes/event-mode'; -import VectorizedMode from '../backtesting/modes/vectorized-mode'; -import HybridMode from '../backtesting/modes/hybrid-mode'; - -const logger = getLogger('strategy-cli'); - -interface CLIBacktestConfig { - strategy: string; - strategies: string; - symbol: string; - startDate: string; - endDate: string; - mode: 'live' | 'event' | 'vectorized' | 'hybrid'; - initialCapital?: 
number; - config?: string; - output?: string; - verbose?: boolean; -} - -async function runBacktest(options: CLIBacktestConfig): Promise { - logger.info('Starting backtest from CLI', { options }); - - try { - // Initialize event bus - const eventBus = createEventBus({ - serviceName: 'strategy-cli', - enablePersistence: false // Disable Redis for CLI - }); - - // Create backtest context - const context: BacktestContext = { - backtestId: `cli_${Date.now()}`, - strategy: { - id: options.strategy, - name: options.strategy, - type: options.strategy, - code: options.strategy, - parameters: {} - }, - symbol: options.symbol, - startDate: options.startDate, - endDate: options.endDate, - initialCapital: options.initialCapital || 10000, - mode: options.mode - }; - - // Load additional config if provided - if (options.config) { - const configData = await loadConfig(options.config); - context.strategy.parameters = { ...context.strategy.parameters, ...configData }; - } - - // Create and execute the appropriate mode - let executionMode; - - switch (options.mode) { - case 'live': - executionMode = new LiveMode(context, eventBus); - break; - case 'event': - executionMode = new EventMode(context, eventBus); - break; - case 'vectorized': - executionMode = new VectorizedMode(context, eventBus); - break; - case 'hybrid': - executionMode = new HybridMode(context, eventBus); - break; - default: - throw new Error(`Unknown execution mode: ${options.mode}`); - } - - // Subscribe to progress updates - eventBus.subscribe('backtest.update', (message) => { - const { backtestId, progress, ...data } = message.data; - console.log(`Progress: ${progress}%`, data); - }); - - await executionMode.initialize(); - const result = await executionMode.execute(); - await executionMode.cleanup(); - - // Display results - displayResults(result); - - // Save results if output specified - if (options.output) { - await saveResults(result, options.output); - } - - await eventBus.close(); - - } catch (error) { - 
logger.error('Backtest failed', error); - process.exit(1); - } -} - -async function loadConfig(configPath: string): Promise { - try { - if (configPath.endsWith('.json')) { - const file = Bun.file(configPath); - return await file.json(); - } else { - // Assume it's a JavaScript/TypeScript module - return await import(configPath); - } - } catch (error) { - logger.error('Failed to load config', { configPath, error }); - throw new Error(`Failed to load config from ${configPath}: ${(error as Error).message}`); - } -} - -function displayResults(result: any): void { - console.log('\n=== Backtest Results ==='); - console.log(`Strategy: ${result.strategy.name}`); - console.log(`Symbol: ${result.symbol}`); - console.log(`Period: ${result.startDate} to ${result.endDate}`); - console.log(`Mode: ${result.mode}`); - console.log(`Duration: ${result.duration}ms`); - - console.log('\n--- Performance ---'); - console.log(`Total Return: ${(result.performance.totalReturn * 100).toFixed(2)}%`); - console.log(`Sharpe Ratio: ${result.performance.sharpeRatio.toFixed(3)}`); - console.log(`Max Drawdown: ${(result.performance.maxDrawdown * 100).toFixed(2)}%`); - console.log(`Win Rate: ${(result.performance.winRate * 100).toFixed(1)}%`); - console.log(`Profit Factor: ${result.performance.profitFactor.toFixed(2)}`); - - console.log('\n--- Trading Stats ---'); - console.log(`Total Trades: ${result.performance.totalTrades}`); - console.log(`Winning Trades: ${result.performance.winningTrades}`); - console.log(`Losing Trades: ${result.performance.losingTrades}`); - console.log(`Average Trade: ${result.performance.avgTrade.toFixed(2)}`); - console.log(`Average Win: ${result.performance.avgWin.toFixed(2)}`); - console.log(`Average Loss: ${result.performance.avgLoss.toFixed(2)}`); - console.log(`Largest Win: ${result.performance.largestWin.toFixed(2)}`); - console.log(`Largest Loss: ${result.performance.largestLoss.toFixed(2)}`); - - if (result.metadata) { - console.log('\n--- Metadata ---'); - 
Object.entries(result.metadata).forEach(([key, value]) => { - console.log(`${key}: ${Array.isArray(value) ? value.join(', ') : value}`); - }); - } -} - -async function saveResults(result: any, outputPath: string): Promise { - try { - if (outputPath.endsWith('.json')) { - await Bun.write(outputPath, JSON.stringify(result, null, 2)); - } else if (outputPath.endsWith('.csv')) { - const csv = convertTradesToCSV(result.trades); - await Bun.write(outputPath, csv); - } else { - // Default to JSON - await Bun.write(outputPath + '.json', JSON.stringify(result, null, 2)); - } - - logger.info(`\nResults saved to: ${outputPath}`); - } catch (error) { - logger.error('Failed to save results', { outputPath, error }); - } -} - -function convertTradesToCSV(trades: any[]): string { - if (trades.length === 0) return 'No trades executed\n'; - - const headers = Object.keys(trades[0]).join(','); - const rows = trades.map(trade => - Object.values(trade).map(value => - typeof value === 'string' ? `"${value}"` : value - ).join(',') - ); - - return [headers, ...rows].join('\n'); -} - -async function listStrategies(): Promise { - console.log('Available strategies:'); - console.log(' sma_crossover - Simple Moving Average Crossover'); - console.log(' ema_crossover - Exponential Moving Average Crossover'); - console.log(' rsi_mean_reversion - RSI Mean Reversion'); - console.log(' macd_trend - MACD Trend Following'); - console.log(' bollinger_bands - Bollinger Bands Strategy'); - // Add more as they're implemented -} - -async function validateStrategy(strategy: string): Promise { - console.log(`Validating strategy: ${strategy}`); - - // TODO: Add strategy validation logic - // This could check if the strategy exists, has valid parameters, etc. 
- - const validStrategies = ['sma_crossover', 'ema_crossover', 'rsi_mean_reversion', 'macd_trend', 'bollinger_bands']; - - if (!validStrategies.includes(strategy)) { - console.warn(`Warning: Strategy '${strategy}' is not in the list of known strategies`); - console.log('Use --list-strategies to see available strategies'); - } else { - console.log(`βœ“ Strategy '${strategy}' is valid`); - } -} - -// CLI Commands -program - .name('strategy-cli') - .description('Stock Trading Bot Strategy CLI') - .version('1.0.0'); - -program - .command('backtest') - .description('Run a backtest') - .requiredOption('-s, --strategy ', 'Strategy to test') - .requiredOption('--symbol ', 'Symbol to trade') - .requiredOption('--start-date ', 'Start date (YYYY-MM-DD)') - .requiredOption('--end-date ', 'End date (YYYY-MM-DD)') - .option('-m, --mode ', 'Execution mode', 'vectorized') - .option('-c, --initial-capital ', 'Initial capital', '10000') - .option('--config ', 'Configuration file path') - .option('-o, --output ', 'Output file path') - .option('-v, --verbose', 'Verbose output') - .action(async (options: CLIBacktestConfig) => { - await runBacktest(options); - }); - -program - .command('list-strategies') - .description('List available strategies') - .action(listStrategies); - -program - .command('validate') - .description('Validate a strategy') - .requiredOption('-s, --strategy ', 'Strategy to validate') - .action(async (options: CLIBacktestConfig) => { - await validateStrategy(options.strategy); - }); - -program - .command('compare') - .description('Compare multiple strategies') - .requiredOption('--strategies ', 'Comma-separated list of strategies') - .requiredOption('--symbol ', 'Symbol to trade') - .requiredOption('--start-date ', 'Start date (YYYY-MM-DD)') - .requiredOption('--end-date ', 'End date (YYYY-MM-DD)') - .option('-m, --mode ', 'Execution mode', 'vectorized') - .option('-c, --initial-capital ', 'Initial capital', '10000') - .option('-o, --output ', 'Output directory') - 
.action(async (options: CLIBacktestConfig) => { - const strategies = options.strategies.split(',').map((s: string) => s.trim()); - console.log(`Comparing strategies: ${strategies.join(', ')}`); - - const results: any[] = []; - - for (const strategy of strategies) { - console.log(`\nRunning ${strategy}...`); - try { - await runBacktest({ - ...options, - strategy, - output: options.output ? `${options.output}/${strategy}.json` : undefined - }); - } catch (error) { - console.error(`Failed to run ${strategy}:`, (error as Error).message); - } - } - - console.log('\nComparison completed!'); - }); - -// Parse command line arguments -program.parse(); - -export { runBacktest, listStrategies, validateStrategy }; +#!/usr/bin/env bun +/** + * Strategy Service CLI + * Command-line interface for running backtests and managing strategies + */ + +import { program } from 'commander'; +import { getLogger } from '@stock-bot/logger'; +import { createEventBus } from '@stock-bot/event-bus'; +import { BacktestContext } from '../framework/execution-mode'; +import { LiveMode } from '../backtesting/modes/live-mode'; +import { EventMode } from '../backtesting/modes/event-mode'; +import VectorizedMode from '../backtesting/modes/vectorized-mode'; +import HybridMode from '../backtesting/modes/hybrid-mode'; + +const logger = getLogger('strategy-cli'); + +interface CLIBacktestConfig { + strategy: string; + strategies: string; + symbol: string; + startDate: string; + endDate: string; + mode: 'live' | 'event' | 'vectorized' | 'hybrid'; + initialCapital?: number; + config?: string; + output?: string; + verbose?: boolean; +} + +async function runBacktest(options: CLIBacktestConfig): Promise { + logger.info('Starting backtest from CLI', { options }); + + try { + // Initialize event bus + const eventBus = createEventBus({ + serviceName: 'strategy-cli', + enablePersistence: false // Disable Redis for CLI + }); + + // Create backtest context + const context: BacktestContext = { + backtestId: 
`cli_${Date.now()}`, + strategy: { + id: options.strategy, + name: options.strategy, + type: options.strategy, + code: options.strategy, + parameters: {} + }, + symbol: options.symbol, + startDate: options.startDate, + endDate: options.endDate, + initialCapital: options.initialCapital || 10000, + mode: options.mode + }; + + // Load additional config if provided + if (options.config) { + const configData = await loadConfig(options.config); + context.strategy.parameters = { ...context.strategy.parameters, ...configData }; + } + + // Create and execute the appropriate mode + let executionMode; + + switch (options.mode) { + case 'live': + executionMode = new LiveMode(context, eventBus); + break; + case 'event': + executionMode = new EventMode(context, eventBus); + break; + case 'vectorized': + executionMode = new VectorizedMode(context, eventBus); + break; + case 'hybrid': + executionMode = new HybridMode(context, eventBus); + break; + default: + throw new Error(`Unknown execution mode: ${options.mode}`); + } + + // Subscribe to progress updates + eventBus.subscribe('backtest.update', (message) => { + const { backtestId, progress, ...data } = message.data; + console.log(`Progress: ${progress}%`, data); + }); + + await executionMode.initialize(); + const result = await executionMode.execute(); + await executionMode.cleanup(); + + // Display results + displayResults(result); + + // Save results if output specified + if (options.output) { + await saveResults(result, options.output); + } + + await eventBus.close(); + + } catch (error) { + logger.error('Backtest failed', error); + process.exit(1); + } +} + +async function loadConfig(configPath: string): Promise { + try { + if (configPath.endsWith('.json')) { + const file = Bun.file(configPath); + return await file.json(); + } else { + // Assume it's a JavaScript/TypeScript module + return await import(configPath); + } + } catch (error) { + logger.error('Failed to load config', { configPath, error }); + throw new 
Error(`Failed to load config from ${configPath}: ${(error as Error).message}`); + } +} + +function displayResults(result: any): void { + console.log('\n=== Backtest Results ==='); + console.log(`Strategy: ${result.strategy.name}`); + console.log(`Symbol: ${result.symbol}`); + console.log(`Period: ${result.startDate} to ${result.endDate}`); + console.log(`Mode: ${result.mode}`); + console.log(`Duration: ${result.duration}ms`); + + console.log('\n--- Performance ---'); + console.log(`Total Return: ${(result.performance.totalReturn * 100).toFixed(2)}%`); + console.log(`Sharpe Ratio: ${result.performance.sharpeRatio.toFixed(3)}`); + console.log(`Max Drawdown: ${(result.performance.maxDrawdown * 100).toFixed(2)}%`); + console.log(`Win Rate: ${(result.performance.winRate * 100).toFixed(1)}%`); + console.log(`Profit Factor: ${result.performance.profitFactor.toFixed(2)}`); + + console.log('\n--- Trading Stats ---'); + console.log(`Total Trades: ${result.performance.totalTrades}`); + console.log(`Winning Trades: ${result.performance.winningTrades}`); + console.log(`Losing Trades: ${result.performance.losingTrades}`); + console.log(`Average Trade: ${result.performance.avgTrade.toFixed(2)}`); + console.log(`Average Win: ${result.performance.avgWin.toFixed(2)}`); + console.log(`Average Loss: ${result.performance.avgLoss.toFixed(2)}`); + console.log(`Largest Win: ${result.performance.largestWin.toFixed(2)}`); + console.log(`Largest Loss: ${result.performance.largestLoss.toFixed(2)}`); + + if (result.metadata) { + console.log('\n--- Metadata ---'); + Object.entries(result.metadata).forEach(([key, value]) => { + console.log(`${key}: ${Array.isArray(value) ? 
value.join(', ') : value}`); + }); + } +} + +async function saveResults(result: any, outputPath: string): Promise { + try { + if (outputPath.endsWith('.json')) { + await Bun.write(outputPath, JSON.stringify(result, null, 2)); + } else if (outputPath.endsWith('.csv')) { + const csv = convertTradesToCSV(result.trades); + await Bun.write(outputPath, csv); + } else { + // Default to JSON + await Bun.write(outputPath + '.json', JSON.stringify(result, null, 2)); + } + + logger.info(`\nResults saved to: ${outputPath}`); + } catch (error) { + logger.error('Failed to save results', { outputPath, error }); + } +} + +function convertTradesToCSV(trades: any[]): string { + if (trades.length === 0) return 'No trades executed\n'; + + const headers = Object.keys(trades[0]).join(','); + const rows = trades.map(trade => + Object.values(trade).map(value => + typeof value === 'string' ? `"${value}"` : value + ).join(',') + ); + + return [headers, ...rows].join('\n'); +} + +async function listStrategies(): Promise { + console.log('Available strategies:'); + console.log(' sma_crossover - Simple Moving Average Crossover'); + console.log(' ema_crossover - Exponential Moving Average Crossover'); + console.log(' rsi_mean_reversion - RSI Mean Reversion'); + console.log(' macd_trend - MACD Trend Following'); + console.log(' bollinger_bands - Bollinger Bands Strategy'); + // Add more as they're implemented +} + +async function validateStrategy(strategy: string): Promise { + console.log(`Validating strategy: ${strategy}`); + + // TODO: Add strategy validation logic + // This could check if the strategy exists, has valid parameters, etc. 
+ + const validStrategies = ['sma_crossover', 'ema_crossover', 'rsi_mean_reversion', 'macd_trend', 'bollinger_bands']; + + if (!validStrategies.includes(strategy)) { + console.warn(`Warning: Strategy '${strategy}' is not in the list of known strategies`); + console.log('Use --list-strategies to see available strategies'); + } else { + console.log(`βœ“ Strategy '${strategy}' is valid`); + } +} + +// CLI Commands +program + .name('strategy-cli') + .description('Stock Trading Bot Strategy CLI') + .version('1.0.0'); + +program + .command('backtest') + .description('Run a backtest') + .requiredOption('-s, --strategy ', 'Strategy to test') + .requiredOption('--symbol ', 'Symbol to trade') + .requiredOption('--start-date ', 'Start date (YYYY-MM-DD)') + .requiredOption('--end-date ', 'End date (YYYY-MM-DD)') + .option('-m, --mode ', 'Execution mode', 'vectorized') + .option('-c, --initial-capital ', 'Initial capital', '10000') + .option('--config ', 'Configuration file path') + .option('-o, --output ', 'Output file path') + .option('-v, --verbose', 'Verbose output') + .action(async (options: CLIBacktestConfig) => { + await runBacktest(options); + }); + +program + .command('list-strategies') + .description('List available strategies') + .action(listStrategies); + +program + .command('validate') + .description('Validate a strategy') + .requiredOption('-s, --strategy ', 'Strategy to validate') + .action(async (options: CLIBacktestConfig) => { + await validateStrategy(options.strategy); + }); + +program + .command('compare') + .description('Compare multiple strategies') + .requiredOption('--strategies ', 'Comma-separated list of strategies') + .requiredOption('--symbol ', 'Symbol to trade') + .requiredOption('--start-date ', 'Start date (YYYY-MM-DD)') + .requiredOption('--end-date ', 'End date (YYYY-MM-DD)') + .option('-m, --mode ', 'Execution mode', 'vectorized') + .option('-c, --initial-capital ', 'Initial capital', '10000') + .option('-o, --output ', 'Output directory') + 
.action(async (options: CLIBacktestConfig) => { + const strategies = options.strategies.split(',').map((s: string) => s.trim()); + console.log(`Comparing strategies: ${strategies.join(', ')}`); + + const results: any[] = []; + + for (const strategy of strategies) { + console.log(`\nRunning ${strategy}...`); + try { + await runBacktest({ + ...options, + strategy, + output: options.output ? `${options.output}/${strategy}.json` : undefined + }); + } catch (error) { + console.error(`Failed to run ${strategy}:`, (error as Error).message); + } + } + + console.log('\nComparison completed!'); + }); + +// Parse command line arguments +program.parse(); + +export { runBacktest, listStrategies, validateStrategy }; diff --git a/apps/strategy-service/src/framework/execution-mode.ts b/apps/strategy-service/src/framework/execution-mode.ts index d92cd48..914fe8a 100644 --- a/apps/strategy-service/src/framework/execution-mode.ts +++ b/apps/strategy-service/src/framework/execution-mode.ts @@ -1,80 +1,80 @@ -/** - * Execution Mode Framework - * Base classes for different execution modes (live, event-driven, vectorized) - */ -import { getLogger } from '@stock-bot/logger'; - -const logger = getLogger('execution-mode'); - -export interface Order { - id: string; - symbol: string; - side: 'BUY' | 'SELL'; - quantity: number; - type: 'MARKET' | 'LIMIT'; - price?: number; - timestamp: Date; -} - -export interface OrderResult { - orderId: string; - symbol: string; - executedQuantity: number; - executedPrice: number; - commission: number; - slippage: number; - timestamp: Date; - executionTime: number; -} - -export interface MarketData { - symbol: string; - timestamp: Date; - open: number; - high: number; - low: number; - close: number; - volume: number; -} - -export abstract class ExecutionMode { - protected logger = getLogger(this.constructor.name); - - abstract name: string; - abstract executeOrder(order: Order): Promise; - abstract getCurrentTime(): Date; - abstract getMarketData(symbol: 
string): Promise; - abstract publishEvent(event: string, data: any): Promise; -} - -export enum BacktestMode { - LIVE = 'live', - EVENT_DRIVEN = 'event-driven', - VECTORIZED = 'vectorized', - HYBRID = 'hybrid' -} - -export class ModeFactory { - static create(mode: BacktestMode, config?: any): ExecutionMode { - switch (mode) { - case BacktestMode.LIVE: - // TODO: Import and create LiveMode - throw new Error('LiveMode not implemented yet'); - - case BacktestMode.EVENT_DRIVEN: - // TODO: Import and create EventBacktestMode - throw new Error('EventBacktestMode not implemented yet'); - - case BacktestMode.VECTORIZED: - // TODO: Import and create VectorBacktestMode - throw new Error('VectorBacktestMode not implemented yet'); - - case BacktestMode.HYBRID: - // TODO: Import and create HybridBacktestMode - throw new Error('HybridBacktestMode not implemented yet'); - - default: - throw new Error(`Unknown mode: ${mode}`); - } - } -} +/** + * Execution Mode Framework + * Base classes for different execution modes (live, event-driven, vectorized) + */ +import { getLogger } from '@stock-bot/logger'; + +const logger = getLogger('execution-mode'); + +export interface Order { + id: string; + symbol: string; + side: 'BUY' | 'SELL'; + quantity: number; + type: 'MARKET' | 'LIMIT'; + price?: number; + timestamp: Date; +} + +export interface OrderResult { + orderId: string; + symbol: string; + executedQuantity: number; + executedPrice: number; + commission: number; + slippage: number; + timestamp: Date; + executionTime: number; +} + +export interface MarketData { + symbol: string; + timestamp: Date; + open: number; + high: number; + low: number; + close: number; + volume: number; +} + +export abstract class ExecutionMode { + protected logger = getLogger(this.constructor.name); + + abstract name: string; + abstract executeOrder(order: Order): Promise; + abstract getCurrentTime(): Date; + abstract getMarketData(symbol: string): Promise; + abstract publishEvent(event: string, data: any): 
Promise; +} + +export enum BacktestMode { + LIVE = 'live', + EVENT_DRIVEN = 'event-driven', + VECTORIZED = 'vectorized', + HYBRID = 'hybrid' +} + +export class ModeFactory { + static create(mode: BacktestMode, config?: any): ExecutionMode { + switch (mode) { + case BacktestMode.LIVE: + // TODO: Import and create LiveMode + throw new Error('LiveMode not implemented yet'); + + case BacktestMode.EVENT_DRIVEN: + // TODO: Import and create EventBacktestMode + throw new Error('EventBacktestMode not implemented yet'); + + case BacktestMode.VECTORIZED: + // TODO: Import and create VectorBacktestMode + throw new Error('VectorBacktestMode not implemented yet'); + + case BacktestMode.HYBRID: + // TODO: Import and create HybridBacktestMode + throw new Error('HybridBacktestMode not implemented yet'); + + default: + throw new Error(`Unknown mode: ${mode}`); + } + } +} diff --git a/apps/strategy-service/src/index.ts b/apps/strategy-service/src/index.ts index 689c995..23926f2 100644 --- a/apps/strategy-service/src/index.ts +++ b/apps/strategy-service/src/index.ts @@ -1,89 +1,89 @@ -/** - * Strategy Service - Multi-mode strategy execution and backtesting - */ -import { getLogger } from '@stock-bot/logger'; -import { loadEnvVariables } from '@stock-bot/config'; -import { Hono } from 'hono'; -import { serve } from '@hono/node-server'; - -// Load environment variables -loadEnvVariables(); - -const app = new Hono(); -const logger = getLogger('strategy-service'); -const PORT = parseInt(process.env.STRATEGY_SERVICE_PORT || '3004'); - -// Health check endpoint -app.get('/health', (c) => { - return c.json({ - service: 'strategy-service', - status: 'healthy', - timestamp: new Date().toISOString() - }); -}); - -// Strategy execution endpoints -app.post('/api/strategy/run', async (c) => { - const body = await c.req.json(); - logger.info('Strategy run request', { - strategy: body.strategy, - mode: body.mode - }); - - // TODO: Implement strategy execution - return c.json({ - message: 'Strategy 
execution endpoint - not implemented yet', - strategy: body.strategy, - mode: body.mode - }); -}); - -// Backtesting endpoints -app.post('/api/backtest/event', async (c) => { - const body = await c.req.json(); - logger.info('Event-driven backtest request', { strategy: body.strategy }); - - // TODO: Implement event-driven backtesting - return c.json({ - message: 'Event-driven backtest endpoint - not implemented yet' - }); -}); - -app.post('/api/backtest/vector', async (c) => { - const body = await c.req.json(); - logger.info('Vectorized backtest request', { strategy: body.strategy }); - - // TODO: Implement vectorized backtesting - return c.json({ - message: 'Vectorized backtest endpoint - not implemented yet' - }); -}); - -app.post('/api/backtest/hybrid', async (c) => { - const body = await c.req.json(); - logger.info('Hybrid backtest request', { strategy: body.strategy }); - - // TODO: Implement hybrid backtesting - return c.json({ - message: 'Hybrid backtest endpoint - not implemented yet' - }); -}); - -// Parameter optimization endpoint -app.post('/api/optimize', async (c) => { - const body = await c.req.json(); - logger.info('Parameter optimization request', { strategy: body.strategy }); - - // TODO: Implement parameter optimization - return c.json({ - message: 'Parameter optimization endpoint - not implemented yet' - }); -}); - -// Start server -serve({ - fetch: app.fetch, - port: PORT, -}); - -logger.info(`Strategy Service started on port ${PORT}`); +/** + * Strategy Service - Multi-mode strategy execution and backtesting + */ +import { getLogger } from '@stock-bot/logger'; +import { loadEnvVariables } from '@stock-bot/config'; +import { Hono } from 'hono'; +import { serve } from '@hono/node-server'; + +// Load environment variables +loadEnvVariables(); + +const app = new Hono(); +const logger = getLogger('strategy-service'); +const PORT = parseInt(process.env.STRATEGY_SERVICE_PORT || '3004'); + +// Health check endpoint +app.get('/health', (c) => { + return 
c.json({ + service: 'strategy-service', + status: 'healthy', + timestamp: new Date().toISOString() + }); +}); + +// Strategy execution endpoints +app.post('/api/strategy/run', async (c) => { + const body = await c.req.json(); + logger.info('Strategy run request', { + strategy: body.strategy, + mode: body.mode + }); + + // TODO: Implement strategy execution + return c.json({ + message: 'Strategy execution endpoint - not implemented yet', + strategy: body.strategy, + mode: body.mode + }); +}); + +// Backtesting endpoints +app.post('/api/backtest/event', async (c) => { + const body = await c.req.json(); + logger.info('Event-driven backtest request', { strategy: body.strategy }); + + // TODO: Implement event-driven backtesting + return c.json({ + message: 'Event-driven backtest endpoint - not implemented yet' + }); +}); + +app.post('/api/backtest/vector', async (c) => { + const body = await c.req.json(); + logger.info('Vectorized backtest request', { strategy: body.strategy }); + + // TODO: Implement vectorized backtesting + return c.json({ + message: 'Vectorized backtest endpoint - not implemented yet' + }); +}); + +app.post('/api/backtest/hybrid', async (c) => { + const body = await c.req.json(); + logger.info('Hybrid backtest request', { strategy: body.strategy }); + + // TODO: Implement hybrid backtesting + return c.json({ + message: 'Hybrid backtest endpoint - not implemented yet' + }); +}); + +// Parameter optimization endpoint +app.post('/api/optimize', async (c) => { + const body = await c.req.json(); + logger.info('Parameter optimization request', { strategy: body.strategy }); + + // TODO: Implement parameter optimization + return c.json({ + message: 'Parameter optimization endpoint - not implemented yet' + }); +}); + +// Start server +serve({ + fetch: app.fetch, + port: PORT, +}); + +logger.info(`Strategy Service started on port ${PORT}`); diff --git a/apps/strategy-service/tsconfig.json b/apps/strategy-service/tsconfig.json index 71a36bb..c491766 100644 --- 
a/apps/strategy-service/tsconfig.json +++ b/apps/strategy-service/tsconfig.json @@ -1,18 +1,18 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**", "**/tests/**", "**/__tests__/**"], - "references": [ - { "path": "../../libs/types" }, - { "path": "../../libs/config" }, - { "path": "../../libs/logger" }, - { "path": "../../libs/utils" }, - { "path": "../../libs/strategy-engine" }, - { "path": "../../libs/event-bus" }, - { "path": "../../libs/shutdown" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**", "**/tests/**", "**/__tests__/**"], + "references": [ + { "path": "../../libs/types" }, + { "path": "../../libs/config" }, + { "path": "../../libs/logger" }, + { "path": "../../libs/utils" }, + { "path": "../../libs/strategy-engine" }, + { "path": "../../libs/event-bus" }, + { "path": "../../libs/shutdown" } + ] +} diff --git a/apps/strategy-service/turbo.json b/apps/strategy-service/turbo.json index 7acf2e8..16180ef 100644 --- a/apps/strategy-service/turbo.json +++ b/apps/strategy-service/turbo.json @@ -1,18 +1,18 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": [ - "@stock-bot/types#build", - "@stock-bot/config#build", - "@stock-bot/logger#build", - "@stock-bot/utils#build", - "@stock-bot/strategy-engine#build", - "@stock-bot/event-bus#build", - "@stock-bot/shutdown#build" - ], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": [ + "@stock-bot/types#build", + "@stock-bot/config#build", + "@stock-bot/logger#build", + 
"@stock-bot/utils#build", + "@stock-bot/strategy-engine#build", + "@stock-bot/event-bus#build", + "@stock-bot/shutdown#build" + ], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/bun.lock b/bun.lock index e94cb36..21c4404 100644 --- a/bun.lock +++ b/bun.lock @@ -1,2533 +1,2533 @@ -{ - "lockfileVersion": 1, - "workspaces": { - "": { - "name": "stock-bot", - "dependencies": { - "bullmq": "^5.53.2", - }, - "devDependencies": { - "@testcontainers/mongodb": "^10.7.2", - "@testcontainers/postgresql": "^10.7.2", - "@types/bun": "latest", - "@types/node": "^22.15.30", - "@types/supertest": "^6.0.2", - "@types/yup": "^0.32.0", - "bun-types": "^1.2.15", - "mongodb-memory-server": "^9.1.6", - "pg-mem": "^2.8.1", - "supertest": "^6.3.4", - "turbo": "^2.5.4", - "typescript": "^5.8.3", - "yup": "^1.6.1", - }, - }, - "apps/dashboard": { - "name": "trading-dashboard", - "version": "0.0.0", - "dependencies": { - "@angular/animations": "^20.0.0", - "@angular/cdk": "^20.0.1", - "@angular/common": "^20.0.0", - "@angular/compiler": "^20.0.0", - "@angular/core": "^20.0.0", - "@angular/forms": "^20.0.0", - "@angular/material": "^20.0.1", - "@angular/platform-browser": "^20.0.0", - "@angular/router": "^20.0.0", - "rxjs": "~7.8.2", - "tslib": "^2.8.1", - "zone.js": "~0.15.1", - }, - "devDependencies": { - "@angular/build": "^20.0.0", - "@angular/cli": "^20.0.0", - "@angular/compiler-cli": "^20.0.0", - "@tailwindcss/postcss": "^4.1.8", - "@types/jasmine": "~5.1.8", - "autoprefixer": "^10.4.21", - "jasmine-core": "~5.7.1", - "karma": "~6.4.4", - "karma-chrome-launcher": "~3.2.0", - "karma-coverage": "~2.2.1", - "karma-jasmine": "~5.1.0", - "karma-jasmine-html-reporter": "~2.1.0", - "postcss": "^8.5.4", - "tailwindcss": "^4.1.8", - "typescript": "~5.8.3", - }, - }, - "apps/data-service": { - "name": "@stock-bot/data-service", - "version": "1.0.0", - 
"dependencies": { - "@stock-bot/cache": "*", - "@stock-bot/config": "*", - "@stock-bot/event-bus": "*", - "@stock-bot/http": "*", - "@stock-bot/logger": "*", - "@stock-bot/mongodb-client": "*", - "@stock-bot/questdb-client": "*", - "@stock-bot/shutdown": "*", - "@stock-bot/types": "*", - "bullmq": "^5.53.2", - "hono": "^4.0.0", - "p-limit": "^6.2.0", - "ws": "^8.0.0", - }, - "devDependencies": { - "@types/ws": "^8.0.0", - "typescript": "^5.0.0", - }, - }, - "apps/execution-service": { - "name": "@stock-bot/execution-service", - "version": "1.0.0", - "dependencies": { - "@hono/node-server": "^1.12.0", - "@stock-bot/config": "*", - "@stock-bot/event-bus": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "@stock-bot/utils": "*", - "hono": "^4.6.1", - }, - "devDependencies": { - "@types/node": "^22.5.0", - "typescript": "^5.5.4", - }, - }, - "apps/portfolio-service": { - "name": "@stock-bot/portfolio-service", - "version": "1.0.0", - "dependencies": { - "@hono/node-server": "^1.12.0", - "@stock-bot/config": "*", - "@stock-bot/data-frame": "*", - "@stock-bot/logger": "*", - "@stock-bot/questdb-client": "*", - "@stock-bot/types": "*", - "@stock-bot/utils": "*", - "hono": "^4.6.1", - }, - "devDependencies": { - "@types/node": "^22.5.0", - "typescript": "^5.5.4", - }, - }, - "apps/processing-service": { - "name": "@stock-bot/processing-service", - "version": "1.0.0", - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/event-bus": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "@stock-bot/utils": "*", - "@stock-bot/vector-engine": "*", - "hono": "^4.0.0", - }, - "devDependencies": { - "typescript": "^5.0.0", - }, - }, - "apps/strategy-service": { - "name": "@stock-bot/strategy-service", - "version": "1.0.0", - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/data-frame": "*", - "@stock-bot/event-bus": "*", - "@stock-bot/logger": "*", - "@stock-bot/questdb-client": "*", - "@stock-bot/strategy-engine": "*", - 
"@stock-bot/types": "*", - "@stock-bot/utils": "*", - "@stock-bot/vector-engine": "*", - "commander": "^11.0.0", - "hono": "^4.0.0", - }, - "devDependencies": { - "@types/node": "^20.0.0", - "typescript": "^5.0.0", - }, - }, - "libs/cache": { - "name": "@stock-bot/cache", - "version": "1.0.0", - "dependencies": { - "ioredis": "^5.3.2", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "bun-types": "^1.2.15", - "typescript": "^5.3.0", - }, - }, - "libs/config": { - "name": "@stock-bot/config", - "version": "1.0.0", - "dependencies": { - "dotenv": "^16.5.0", - "yup": "^1.6.1", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "@typescript-eslint/eslint-plugin": "^6.19.0", - "@typescript-eslint/parser": "^6.19.0", - "bun-types": "^1.2.15", - "eslint": "^8.56.0", - "typescript": "^5.3.0", - }, - }, - "libs/data-adjustments": { - "name": "@stock-bot/data-adjustments", - "version": "1.0.0", - "dependencies": { - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - }, - "devDependencies": { - "bun-types": "^1.1.12", - "typescript": "^5.4.5", - }, - "peerDependencies": { - "typescript": "^5.0.0", - }, - }, - "libs/data-frame": { - "name": "@stock-bot/data-frame", - "version": "1.0.0", - "dependencies": { - "@stock-bot/logger": "*", - "@stock-bot/utils": "*", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "bun-types": "^1.2.15", - "typescript": "^5.3.0", - }, - }, - "libs/event-bus": { - "name": "@stock-bot/event-bus", - "version": "1.0.0", - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "eventemitter3": "^5.0.1", - "ioredis": "^5.3.2", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "bun-types": "^1.2.15", - "typescript": "^5.3.0", - }, - }, - "libs/http": { - "name": "@stock-bot/http", - "version": "1.0.0", - "dependencies": { - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "axios": "^1.9.0", - "http-proxy-agent": "^7.0.2", - "https-proxy-agent": "^7.0.6", - "socks-proxy-agent": "^8.0.5", - 
}, - "devDependencies": { - "@types/node": "^20.11.0", - "@typescript-eslint/eslint-plugin": "^6.19.0", - "@typescript-eslint/parser": "^6.19.0", - "bun-types": "^1.2.15", - "eslint": "^8.56.0", - "typescript": "^5.3.0", - }, - }, - "libs/logger": { - "name": "@stock-bot/logger", - "version": "1.0.0", - "dependencies": { - "@stock-bot/config": "*", - "got": "^14.4.7", - "pino": "^9.7.0", - "pino-loki": "^2.6.0", - "pino-pretty": "^13.0.0", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "bun-types": "^1.2.15", - "typescript": "^5.3.0", - }, - }, - "libs/mongodb-client": { - "name": "@stock-bot/mongodb-client", - "version": "1.0.0", - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "mongodb": "^6.17.0", - "yup": "^1.6.1", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "@typescript-eslint/eslint-plugin": "^6.19.0", - "@typescript-eslint/parser": "^6.19.0", - "bun-types": "^1.2.15", - "eslint": "^8.56.0", - "typescript": "^5.3.0", - }, - }, - "libs/postgres-client": { - "name": "@stock-bot/postgres-client", - "version": "1.0.0", - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "pg": "^8.11.3", - "yup": "^1.6.1", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "@types/pg": "^8.10.7", - "@typescript-eslint/eslint-plugin": "^6.19.0", - "@typescript-eslint/parser": "^6.19.0", - "bun-types": "^1.2.15", - "eslint": "^8.56.0", - "typescript": "^5.3.0", - }, - }, - "libs/questdb-client": { - "name": "@stock-bot/questdb-client", - "version": "1.0.0", - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "@typescript-eslint/eslint-plugin": "^6.19.0", - "@typescript-eslint/parser": "^6.19.0", - "bun-types": "^1.2.15", - "eslint": "^8.56.0", - "typescript": "^5.3.0", - }, - }, - "libs/shutdown": { - "name": "@stock-bot/shutdown", - 
"version": "1.0.0", - "devDependencies": { - "@types/node": "^20.0.0", - "typescript": "^5.0.0", - }, - }, - "libs/strategy-engine": { - "name": "@stock-bot/strategy-engine", - "version": "1.0.0", - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/data-frame": "*", - "@stock-bot/event-bus": "*", - "@stock-bot/logger": "*", - "@stock-bot/utils": "*", - "commander": "^14.0.0", - "eventemitter3": "^5.0.1", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "bun-types": "^1.2.15", - "typescript": "^5.3.0", - }, - }, - "libs/types": { - "name": "@stock-bot/types", - "version": "1.0.0", - "devDependencies": { - "@types/node": "^20.11.0", - "bun-types": "^1.2.15", - "typescript": "^5.3.0", - }, - }, - "libs/utils": { - "name": "@stock-bot/utils", - "version": "1.0.0", - "dependencies": { - "@stock-bot/types": "*", - "date-fns": "^2.30.0", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "bun-types": "^1.2.15", - "typescript": "^5.3.0", - }, - }, - "libs/vector-engine": { - "name": "@stock-bot/vector-engine", - "version": "1.0.0", - "dependencies": { - "@stock-bot/data-frame": "*", - "@stock-bot/logger": "*", - "@stock-bot/utils": "*", - }, - "devDependencies": { - "@types/node": "^20.11.0", - "bun-types": "^1.2.15", - "typescript": "^5.3.0", - }, - }, - }, - "trustedDependencies": [ - "mongodb", - ], - "packages": { - "@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="], - - "@ampproject/remapping": ["@ampproject/remapping@2.3.0", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw=="], - - "@angular-devkit/architect": ["@angular-devkit/architect@0.2000.1", "", { "dependencies": { "@angular-devkit/core": "20.0.1", "rxjs": "7.8.2" } }, 
"sha512-EcOGU1xEhARYpDF391VaeUg/+YRym9OxzJMcc0rSHl3YLK8/m+24ap2YAQY5N7n9+mmEqHVu/q31ldFpOoMCTw=="], - - "@angular-devkit/core": ["@angular-devkit/core@20.0.1", "", { "dependencies": { "ajv": "8.17.1", "ajv-formats": "3.0.1", "jsonc-parser": "3.3.1", "picomatch": "4.0.2", "rxjs": "7.8.2", "source-map": "0.7.4" }, "peerDependencies": { "chokidar": "^4.0.0" }, "optionalPeers": ["chokidar"] }, "sha512-Ilafyj8JVwq3NZsaiGw5UDkP4EAkGKiEvZ4TC3WVidZbM4EpKt9/Jd7ZpsTRGDLG429U+fGhay+ZQeCFGqy5rA=="], - - "@angular-devkit/schematics": ["@angular-devkit/schematics@20.0.1", "", { "dependencies": { "@angular-devkit/core": "20.0.1", "jsonc-parser": "3.3.1", "magic-string": "0.30.17", "ora": "8.2.0", "rxjs": "7.8.2" } }, "sha512-bSr/5YIdjtwKYqylkYrlOVP+tuFz+tfOldmLfWHAsDGnJUznb5t4ckx6yyROp+iDQfu2Aez09p+l4KfUBq+H9A=="], - - "@angular/animations": ["@angular/animations@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/common": "20.0.2", "@angular/core": "20.0.2" } }, "sha512-p9TqZdVOFWMF75lfxk++5GZOBGO3K7qVdAXiQw89VLac8yqsu9iXFlcq34x256McHxONTjrrKBeP5oU1T8rxCw=="], - - "@angular/build": ["@angular/build@20.0.1", "", { "dependencies": { "@ampproject/remapping": "2.3.0", "@angular-devkit/architect": "0.2000.1", "@babel/core": "7.27.1", "@babel/helper-annotate-as-pure": "7.27.1", "@babel/helper-split-export-declaration": "7.24.7", "@inquirer/confirm": "5.1.10", "@vitejs/plugin-basic-ssl": "2.0.0", "beasties": "0.3.4", "browserslist": "^4.23.0", "esbuild": "0.25.5", "https-proxy-agent": "7.0.6", "istanbul-lib-instrument": "6.0.3", "jsonc-parser": "3.3.1", "listr2": "8.3.3", "magic-string": "0.30.17", "mrmime": "2.0.1", "parse5-html-rewriting-stream": "7.1.0", "picomatch": "4.0.2", "piscina": "5.0.0", "rollup": "4.40.2", "sass": "1.88.0", "semver": "7.7.2", "source-map-support": "0.5.21", "tinyglobby": "0.2.13", "vite": "6.3.5", "watchpack": "2.4.2" }, "optionalDependencies": { "lmdb": "3.3.0" }, "peerDependencies": { "@angular/compiler": "^20.0.0", 
"@angular/compiler-cli": "^20.0.0", "@angular/core": "^20.0.0", "@angular/localize": "^20.0.0", "@angular/platform-browser": "^20.0.0", "@angular/platform-server": "^20.0.0", "@angular/service-worker": "^20.0.0", "@angular/ssr": "^20.0.1", "karma": "^6.4.0", "less": "^4.2.0", "ng-packagr": "^20.0.0", "postcss": "^8.4.0", "tailwindcss": "^2.0.0 || ^3.0.0 || ^4.0.0", "tslib": "^2.3.0", "typescript": ">=5.8 <5.9", "vitest": "^3.1.1" }, "optionalPeers": ["@angular/core", "@angular/localize", "@angular/platform-browser", "@angular/platform-server", "@angular/service-worker", "@angular/ssr", "karma", "less", "ng-packagr", "postcss", "tailwindcss", "vitest"] }, "sha512-m/0jtXIeOaoU/WXtMLRuvq7UaGRxNHpoRKVVoJrifvZuNBYGM4e2lzxlIlo8kiQhPpZQc0zcAMoosbmzKKdkUQ=="], - - "@angular/cdk": ["@angular/cdk@20.0.2", "", { "dependencies": { "parse5": "^7.1.2", "tslib": "^2.3.0" }, "peerDependencies": { "@angular/common": "^20.0.0 || ^21.0.0", "@angular/core": "^20.0.0 || ^21.0.0", "rxjs": "^6.5.3 || ^7.4.0" } }, "sha512-gRQcpTNhnwBxXSmpnrljODUHQmB2Hnxc6L2Ad6mSMV+c3opd9KIFxL5eG2WOOPHGAaPrV4gNFw+t1i01U4grTg=="], - - "@angular/cli": ["@angular/cli@20.0.1", "", { "dependencies": { "@angular-devkit/architect": "0.2000.1", "@angular-devkit/core": "20.0.1", "@angular-devkit/schematics": "20.0.1", "@inquirer/prompts": "7.5.1", "@listr2/prompt-adapter-inquirer": "2.0.22", "@schematics/angular": "20.0.1", "@yarnpkg/lockfile": "1.1.0", "ini": "5.0.0", "jsonc-parser": "3.3.1", "listr2": "8.3.3", "npm-package-arg": "12.0.2", "npm-pick-manifest": "10.0.0", "pacote": "21.0.0", "resolve": "1.22.10", "semver": "7.7.2", "yargs": "17.7.2" }, "bin": { "ng": "bin/ng.js" } }, "sha512-OU91byvG/WsDDUVmXIJr3/sU89U6g8G8IXrqgVRVPgjXKEQMnUNBlmygD2rMUR5C02g2lGc6s2j0hnOJ/dDNOw=="], - - "@angular/common": ["@angular/common@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/core": "20.0.2", "rxjs": "^6.5.3 || ^7.4.0" } }, 
"sha512-dqzKFL2MgPpQiaY9ZyDhGZYWEXblsqofW6czH/+HkmlNgSmDCBaY/UhNQShxNQ0KQbR1o08OWuQr29zxkY1CMA=="], - - "@angular/compiler": ["@angular/compiler@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" } }, "sha512-BJYXGUZaY9awYvgt0w9TDq73A1+m8W5eMRn/krWeQcfWakwTgs27BSxmhfJhD45KrMrky5yxAvGgqSfMKrLeng=="], - - "@angular/compiler-cli": ["@angular/compiler-cli@20.0.2", "", { "dependencies": { "@babel/core": "7.27.4", "@jridgewell/sourcemap-codec": "^1.4.14", "chokidar": "^4.0.0", "convert-source-map": "^1.5.1", "reflect-metadata": "^0.2.0", "semver": "^7.0.0", "tslib": "^2.3.0", "yargs": "^18.0.0" }, "peerDependencies": { "@angular/compiler": "20.0.2", "typescript": ">=5.8 <5.9" }, "optionalPeers": ["typescript"], "bin": { "ngc": "bundles/src/bin/ngc.js", "ng-xi18n": "bundles/src/bin/ng_xi18n.js" } }, "sha512-kVKHS5ZRadTR+rRuBl3Dsccsv/jiHXdJJYlDQwQW87afd4RtAu75P3RsSd8jaUj+7P9O4Ve4vwCZVtgOh0yxbw=="], - - "@angular/core": ["@angular/core@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/compiler": "20.0.2", "rxjs": "^6.5.3 || ^7.4.0", "zone.js": "~0.15.0" }, "optionalPeers": ["@angular/compiler", "zone.js"] }, "sha512-z9L8WPrHTkfupHtpO6aW4KqcqigIhxcQwCaEMgXWc5WJkoiMJSfo/dk+cyiGjCfTkc5Y6DO6f6ERi0IWYWWbPA=="], - - "@angular/forms": ["@angular/forms@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/common": "20.0.2", "@angular/core": "20.0.2", "@angular/platform-browser": "20.0.2", "rxjs": "^6.5.3 || ^7.4.0" } }, "sha512-RrQKwzFZsEDXsvesNXS4XxndEKZHC+VexIdRr1vlxx7isfvpl4htOxceW0D+Gvku1mnaS99eB/AWS50HxW3B3Q=="], - - "@angular/material": ["@angular/material@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/cdk": "20.0.2", "@angular/common": "^20.0.0 || ^21.0.0", "@angular/core": "^20.0.0 || ^21.0.0", "@angular/forms": "^20.0.0 || ^21.0.0", "@angular/platform-browser": "^20.0.0 || ^21.0.0", "rxjs": "^6.5.3 || ^7.4.0" } }, 
"sha512-yIXvF+LjFdHjJWyvn1SxbWB9LdNxYnqEKbKzminW4WPXlPJMOAeyhEDFeQv9W92Zv+/ibS4tI3/SD759ejb45g=="], - - "@angular/platform-browser": ["@angular/platform-browser@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/animations": "20.0.2", "@angular/common": "20.0.2", "@angular/core": "20.0.2" }, "optionalPeers": ["@angular/animations"] }, "sha512-4adMQSVlwxjY9z/LEk3Q5hr4/qbM9UD9FcqbyZOt3+BL+F2GwGdKzwg6Dj4Dv0Tv8/dudNSVgHc8lIdQ4C7K1w=="], - - "@angular/router": ["@angular/router@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/common": "20.0.2", "@angular/core": "20.0.2", "@angular/platform-browser": "20.0.2", "rxjs": "^6.5.3 || ^7.4.0" } }, "sha512-UyuTeoXkkZw1eFFNwrTfb1JXow6HKVdLNb3n9MhqDz+3ekdiqDH8EBaKhxYZxlcpNoa6cNbECZJYtaHy1lw38g=="], - - "@babel/code-frame": ["@babel/code-frame@7.27.1", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg=="], - - "@babel/compat-data": ["@babel/compat-data@7.27.5", "", {}, "sha512-KiRAp/VoJaWkkte84TvUd9qjdbZAdiqyvMxrGl1N6vzFogKmaLgoM3L1kgtLicp2HP5fBJS8JrZKLVIZGVJAVg=="], - - "@babel/core": ["@babel/core@7.27.1", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.1", "@babel/helper-compilation-targets": "^7.27.1", "@babel/helper-module-transforms": "^7.27.1", "@babel/helpers": "^7.27.1", "@babel/parser": "^7.27.1", "@babel/template": "^7.27.1", "@babel/traverse": "^7.27.1", "@babel/types": "^7.27.1", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-IaaGWsQqfsQWVLqMn9OB92MNN7zukfVA4s7KKAI0KfrrDsZ0yhi5uV4baBuLuN7n3vsZpwP8asPPcVwApxvjBQ=="], - - "@babel/generator": ["@babel/generator@7.27.5", "", { "dependencies": { "@babel/parser": "^7.27.5", 
"@babel/types": "^7.27.3", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^3.0.2" } }, "sha512-ZGhA37l0e/g2s1Cnzdix0O3aLYm66eF8aufiVteOgnwxgnRP8GoyMj7VWsgWnQbVKXyge7hqrFh2K2TQM6t1Hw=="], - - "@babel/helper-annotate-as-pure": ["@babel/helper-annotate-as-pure@7.27.1", "", { "dependencies": { "@babel/types": "^7.27.1" } }, "sha512-WnuuDILl9oOBbKnb4L+DyODx7iC47XfzmNCpTttFsSp6hTG7XZxu60+4IO+2/hPfcGOoKbFiwoI/+zwARbNQow=="], - - "@babel/helper-compilation-targets": ["@babel/helper-compilation-targets@7.27.2", "", { "dependencies": { "@babel/compat-data": "^7.27.2", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" } }, "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ=="], - - "@babel/helper-module-imports": ["@babel/helper-module-imports@7.27.1", "", { "dependencies": { "@babel/traverse": "^7.27.1", "@babel/types": "^7.27.1" } }, "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w=="], - - "@babel/helper-module-transforms": ["@babel/helper-module-transforms@7.27.3", "", { "dependencies": { "@babel/helper-module-imports": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1", "@babel/traverse": "^7.27.3" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg=="], - - "@babel/helper-split-export-declaration": ["@babel/helper-split-export-declaration@7.24.7", "", { "dependencies": { "@babel/types": "^7.24.7" } }, "sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA=="], - - "@babel/helper-string-parser": ["@babel/helper-string-parser@7.27.1", "", {}, "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA=="], - - "@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.27.1", "", {}, 
"sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow=="], - - "@babel/helper-validator-option": ["@babel/helper-validator-option@7.27.1", "", {}, "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg=="], - - "@babel/helpers": ["@babel/helpers@7.27.6", "", { "dependencies": { "@babel/template": "^7.27.2", "@babel/types": "^7.27.6" } }, "sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug=="], - - "@babel/parser": ["@babel/parser@7.27.5", "", { "dependencies": { "@babel/types": "^7.27.3" }, "bin": "./bin/babel-parser.js" }, "sha512-OsQd175SxWkGlzbny8J3K8TnnDD0N3lrIUtB92xwyRpzaenGZhxDvxN/JgU00U3CDZNj9tPuDJ5H0WS4Nt3vKg=="], - - "@babel/runtime": ["@babel/runtime@7.27.6", "", {}, "sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q=="], - - "@babel/template": ["@babel/template@7.27.2", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/parser": "^7.27.2", "@babel/types": "^7.27.1" } }, "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw=="], - - "@babel/traverse": ["@babel/traverse@7.27.4", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.3", "@babel/parser": "^7.27.4", "@babel/template": "^7.27.2", "@babel/types": "^7.27.3", "debug": "^4.3.1", "globals": "^11.1.0" } }, "sha512-oNcu2QbHqts9BtOWJosOVJapWjBDSxGCpFvikNR5TGDYDQf3JwpIoMzIKrvfoti93cLfPJEG4tH9SPVeyCGgdA=="], - - "@babel/types": ["@babel/types@7.27.6", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1" } }, "sha512-ETyHEk2VHHvl9b9jZP5IHPavHYk57EhanlRRuae9XCpb/j5bDCbPPMOBfCWhnl/7EDJz0jEMCi/RhccCE8r1+Q=="], - - "@balena/dockerignore": ["@balena/dockerignore@1.0.2", "", {}, "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q=="], - - "@colors/colors": 
["@colors/colors@1.5.0", "", {}, "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ=="], - - "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA=="], - - "@esbuild/android-arm": ["@esbuild/android-arm@0.25.5", "", { "os": "android", "cpu": "arm" }, "sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA=="], - - "@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.5", "", { "os": "android", "cpu": "arm64" }, "sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg=="], - - "@esbuild/android-x64": ["@esbuild/android-x64@0.25.5", "", { "os": "android", "cpu": "x64" }, "sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw=="], - - "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.5", "", { "os": "darwin", "cpu": "arm64" }, "sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ=="], - - "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.5", "", { "os": "darwin", "cpu": "x64" }, "sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ=="], - - "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.5", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw=="], - - "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.5", "", { "os": "freebsd", "cpu": "x64" }, "sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw=="], - - "@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.5", "", { "os": "linux", "cpu": "arm" }, "sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw=="], - - "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.5", "", { "os": 
"linux", "cpu": "arm64" }, "sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg=="], - - "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.5", "", { "os": "linux", "cpu": "ia32" }, "sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA=="], - - "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.5", "", { "os": "linux", "cpu": "none" }, "sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg=="], - - "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.5", "", { "os": "linux", "cpu": "none" }, "sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg=="], - - "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.5", "", { "os": "linux", "cpu": "ppc64" }, "sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ=="], - - "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.5", "", { "os": "linux", "cpu": "none" }, "sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA=="], - - "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.5", "", { "os": "linux", "cpu": "s390x" }, "sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ=="], - - "@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.5", "", { "os": "linux", "cpu": "x64" }, "sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw=="], - - "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.5", "", { "os": "none", "cpu": "arm64" }, "sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw=="], - - "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.5", "", { "os": "none", "cpu": "x64" }, "sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ=="], - - "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.5", "", { "os": 
"openbsd", "cpu": "arm64" }, "sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw=="], - - "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.5", "", { "os": "openbsd", "cpu": "x64" }, "sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg=="], - - "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.5", "", { "os": "sunos", "cpu": "x64" }, "sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA=="], - - "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.5", "", { "os": "win32", "cpu": "arm64" }, "sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw=="], - - "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.5", "", { "os": "win32", "cpu": "ia32" }, "sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ=="], - - "@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.5", "", { "os": "win32", "cpu": "x64" }, "sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g=="], - - "@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.7.0", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw=="], - - "@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.1", "", {}, "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ=="], - - "@eslint/eslintrc": ["@eslint/eslintrc@2.1.4", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^9.6.0", "globals": "^13.19.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ=="], - - "@eslint/js": 
["@eslint/js@8.57.1", "", {}, "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q=="], - - "@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="], - - "@grpc/grpc-js": ["@grpc/grpc-js@1.13.4", "", { "dependencies": { "@grpc/proto-loader": "^0.7.13", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg=="], - - "@grpc/proto-loader": ["@grpc/proto-loader@0.7.15", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.2.5", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ=="], - - "@hono/node-server": ["@hono/node-server@1.14.4", "", { "peerDependencies": { "hono": "^4" } }, "sha512-DnxpshhYewr2q9ZN8ez/M5mmc3sucr8CT1sIgIy1bkeUXut9XWDkqHoFHRhWIQgkYnKpVRxunyhK7WzpJeJ6qQ=="], - - "@humanwhocodes/config-array": ["@humanwhocodes/config-array@0.13.0", "", { "dependencies": { "@humanwhocodes/object-schema": "^2.0.3", "debug": "^4.3.1", "minimatch": "^3.0.5" } }, "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw=="], - - "@humanwhocodes/module-importer": ["@humanwhocodes/module-importer@1.0.1", "", {}, "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="], - - "@humanwhocodes/object-schema": ["@humanwhocodes/object-schema@2.0.3", "", {}, "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA=="], - - "@inquirer/checkbox": ["@inquirer/checkbox@4.1.8", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": 
">=18" }, "optionalPeers": ["@types/node"] }, "sha512-d/QAsnwuHX2OPolxvYcgSj7A9DO9H6gVOy2DvBTx+P2LH2iRTo/RSGV3iwCzW024nP9hw98KIuDmdyhZQj1UQg=="], - - "@inquirer/confirm": ["@inquirer/confirm@5.1.10", "", { "dependencies": { "@inquirer/core": "^10.1.11", "@inquirer/type": "^3.0.6" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-FxbQ9giWxUWKUk2O5XZ6PduVnH2CZ/fmMKMBkH71MHJvWr7WL5AHKevhzF1L5uYWB2P548o1RzVxrNd3dpmk6g=="], - - "@inquirer/core": ["@inquirer/core@10.1.13", "", { "dependencies": { "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2", "cli-width": "^4.1.0", "mute-stream": "^2.0.0", "signal-exit": "^4.1.0", "wrap-ansi": "^6.2.0", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-1viSxebkYN2nJULlzCxES6G9/stgHSepZ9LqqfdIGPHj5OHhiBUXVS0a6R0bEC2A+VL4D9w6QB66ebCr6HGllA=="], - - "@inquirer/editor": ["@inquirer/editor@4.2.13", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7", "external-editor": "^3.1.0" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-WbicD9SUQt/K8O5Vyk9iC2ojq5RHoCLK6itpp2fHsWe44VxxcA9z3GTWlvjSTGmMQpZr+lbVmrxdHcumJoLbMA=="], - - "@inquirer/expand": ["@inquirer/expand@4.0.15", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-4Y+pbr/U9Qcvf+N/goHzPEXiHH8680lM3Dr3Y9h9FFw4gHS+zVpbj8LfbKWIb/jayIB4aSO4pWiBTrBYWkvi5A=="], - - "@inquirer/figures": ["@inquirer/figures@1.0.12", "", {}, "sha512-MJttijd8rMFcKJC8NYmprWr6hD3r9Gd9qUC0XwPNwoEPWSMVJwA2MlXxF+nhZZNMY+HXsWa+o7KY2emWYIn0jQ=="], - - "@inquirer/input": ["@inquirer/input@4.1.12", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": 
["@types/node"] }, "sha512-xJ6PFZpDjC+tC1P8ImGprgcsrzQRsUh9aH3IZixm1lAZFK49UGHxM3ltFfuInN2kPYNfyoPRh+tU4ftsjPLKqQ=="], - - "@inquirer/number": ["@inquirer/number@3.0.15", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-xWg+iYfqdhRiM55MvqiTCleHzszpoigUpN5+t1OMcRkJrUrw7va3AzXaxvS+Ak7Gny0j2mFSTv2JJj8sMtbV2g=="], - - "@inquirer/password": ["@inquirer/password@4.0.15", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-75CT2p43DGEnfGTaqFpbDC2p2EEMrq0S+IRrf9iJvYreMy5mAWj087+mdKyLHapUEPLjN10mNvABpGbk8Wdraw=="], - - "@inquirer/prompts": ["@inquirer/prompts@7.5.1", "", { "dependencies": { "@inquirer/checkbox": "^4.1.6", "@inquirer/confirm": "^5.1.10", "@inquirer/editor": "^4.2.11", "@inquirer/expand": "^4.0.13", "@inquirer/input": "^4.1.10", "@inquirer/number": "^3.0.13", "@inquirer/password": "^4.0.13", "@inquirer/rawlist": "^4.1.1", "@inquirer/search": "^3.0.13", "@inquirer/select": "^4.2.1" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-5AOrZPf2/GxZ+SDRZ5WFplCA2TAQgK3OYrXCYmJL5NaTu4ECcoWFlfUZuw7Es++6Njv7iu/8vpYJhuzxUH76Vg=="], - - "@inquirer/rawlist": ["@inquirer/rawlist@4.1.3", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-7XrV//6kwYumNDSsvJIPeAqa8+p7GJh7H5kRuxirct2cgOcSWwwNGoXDRgpNFbY/MG2vQ4ccIWCi8+IXXyFMZA=="], - - "@inquirer/search": ["@inquirer/search@3.0.15", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, 
"sha512-YBMwPxYBrADqyvP4nNItpwkBnGGglAvCLVW8u4pRmmvOsHUtCAUIMbUrLX5B3tFL1/WsLGdQ2HNzkqswMs5Uaw=="], - - "@inquirer/select": ["@inquirer/select@4.2.3", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-OAGhXU0Cvh0PhLz9xTF/kx6g6x+sP+PcyTiLvCrewI99P3BBeexD+VbuwkNDvqGkk3y2h5ZiWLeRP7BFlhkUDg=="], - - "@inquirer/type": ["@inquirer/type@3.0.7", "", { "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-PfunHQcjwnju84L+ycmcMKB/pTPIngjUJvfnRhKY6FKPuYXlM4aQCb/nIdTFR6BEhMjFvngzvng/vBAJMZpLSA=="], - - "@ioredis/commands": ["@ioredis/commands@1.2.0", "", {}, "sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg=="], - - "@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": { "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], - - "@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="], - - "@istanbuljs/schema": ["@istanbuljs/schema@0.1.3", "", {}, "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA=="], - - "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.8", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="], - - "@jridgewell/resolve-uri": 
["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="], - - "@jridgewell/set-array": ["@jridgewell/set-array@1.2.1", "", {}, "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="], - - "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="], - - "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], - - "@js-sdsl/ordered-map": ["@js-sdsl/ordered-map@4.4.2", "", {}, "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="], - - "@listr2/prompt-adapter-inquirer": ["@listr2/prompt-adapter-inquirer@2.0.22", "", { "dependencies": { "@inquirer/type": "^1.5.5" }, "peerDependencies": { "@inquirer/prompts": ">= 3 < 8" } }, "sha512-hV36ZoY+xKL6pYOt1nPNnkciFkn89KZwqLhAFzJvYysAvL5uBQdiADZx/8bIDXIukzzwG0QlPYolgMzQUtKgpQ=="], - - "@lmdb/lmdb-darwin-arm64": ["@lmdb/lmdb-darwin-arm64@3.3.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-LipbQobyEfQtu8WixasaFUZZ+JCGlho4OWwWIQ5ol0rB1RKkcZvypu7sS1CBvofBGVAa3vbOh8IOGQMrbmL5dg=="], - - "@lmdb/lmdb-darwin-x64": ["@lmdb/lmdb-darwin-x64@3.3.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-yA+9P+ZeA3vg76BLXWeUomIAjxfmSmR2eg8fueHXDg5Xe1Xmkl9JCKuHXUhtJ+mMVcH12d5k4kJBLbyXTadfGQ=="], - - "@lmdb/lmdb-linux-arm": ["@lmdb/lmdb-linux-arm@3.3.0", "", { "os": "linux", "cpu": "arm" }, "sha512-EDYrW9kle+8wI19JCj/PhRnGoCN9bked5cdOPdo1wdgH/HzjgoLPFTn9DHlZccgTEVhp3O+bpWXdN/rWySVvjw=="], - - "@lmdb/lmdb-linux-arm64": ["@lmdb/lmdb-linux-arm64@3.3.0", "", { "os": "linux", "cpu": "arm64" }, 
"sha512-OeWvSgjXXZ/zmtLqqL78I3910F6UYpUubmsUU+iBHo6nTtjkpXms95rJtGrjkWQqwswKBD7xSMplbYC4LEsiPA=="], - - "@lmdb/lmdb-linux-x64": ["@lmdb/lmdb-linux-x64@3.3.0", "", { "os": "linux", "cpu": "x64" }, "sha512-wDd02mt5ScX4+xd6g78zKBr6ojpgCJCTrllCAabjgap5FzuETqOqaQfKhO+tJuGWv/J5q+GIds6uY7rNFueOxg=="], - - "@lmdb/lmdb-win32-arm64": ["@lmdb/lmdb-win32-arm64@3.3.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-COotWhHJgzXULLiEjOgWQwqig6PoA+6ji6W+sDl6M1HhMXWIymEVHGs0edsVSNtsNSCAWMxJgR3asv6FNX/2EA=="], - - "@lmdb/lmdb-win32-x64": ["@lmdb/lmdb-win32-x64@3.3.0", "", { "os": "win32", "cpu": "x64" }, "sha512-kqUgQH+l8HDbkAapx+aoko7Ez4X4DqkIraOqY/k0QY5EN/iialVlFpBUXh4wFXzirdmEVjbIUMrceUh0Kh8LeA=="], - - "@mongodb-js/saslprep": ["@mongodb-js/saslprep@1.2.2", "", { "dependencies": { "sparse-bitfield": "^3.0.3" } }, "sha512-EB0O3SCSNRUFk66iRCpI+cXzIjdswfCs7F6nOC3RAGJ7xr5YhaicvsRwJ9eyzYvYRlCSDUO/c7g4yNulxKC1WA=="], - - "@msgpackr-extract/msgpackr-extract-darwin-arm64": ["@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3", "", { "os": "darwin", "cpu": "arm64" }, "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw=="], - - "@msgpackr-extract/msgpackr-extract-darwin-x64": ["@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw=="], - - "@msgpackr-extract/msgpackr-extract-linux-arm": ["@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3", "", { "os": "linux", "cpu": "arm" }, "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw=="], - - "@msgpackr-extract/msgpackr-extract-linux-arm64": ["@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg=="], - - "@msgpackr-extract/msgpackr-extract-linux-x64": 
["@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3", "", { "os": "linux", "cpu": "x64" }, "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg=="], - - "@msgpackr-extract/msgpackr-extract-win32-x64": ["@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3", "", { "os": "win32", "cpu": "x64" }, "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ=="], - - "@napi-rs/nice": ["@napi-rs/nice@1.0.1", "", { "optionalDependencies": { "@napi-rs/nice-android-arm-eabi": "1.0.1", "@napi-rs/nice-android-arm64": "1.0.1", "@napi-rs/nice-darwin-arm64": "1.0.1", "@napi-rs/nice-darwin-x64": "1.0.1", "@napi-rs/nice-freebsd-x64": "1.0.1", "@napi-rs/nice-linux-arm-gnueabihf": "1.0.1", "@napi-rs/nice-linux-arm64-gnu": "1.0.1", "@napi-rs/nice-linux-arm64-musl": "1.0.1", "@napi-rs/nice-linux-ppc64-gnu": "1.0.1", "@napi-rs/nice-linux-riscv64-gnu": "1.0.1", "@napi-rs/nice-linux-s390x-gnu": "1.0.1", "@napi-rs/nice-linux-x64-gnu": "1.0.1", "@napi-rs/nice-linux-x64-musl": "1.0.1", "@napi-rs/nice-win32-arm64-msvc": "1.0.1", "@napi-rs/nice-win32-ia32-msvc": "1.0.1", "@napi-rs/nice-win32-x64-msvc": "1.0.1" } }, "sha512-zM0mVWSXE0a0h9aKACLwKmD6nHcRiKrPpCfvaKqG1CqDEyjEawId0ocXxVzPMCAm6kkWr2P025msfxXEnt8UGQ=="], - - "@napi-rs/nice-android-arm-eabi": ["@napi-rs/nice-android-arm-eabi@1.0.1", "", { "os": "android", "cpu": "arm" }, "sha512-5qpvOu5IGwDo7MEKVqqyAxF90I6aLj4n07OzpARdgDRfz8UbBztTByBp0RC59r3J1Ij8uzYi6jI7r5Lws7nn6w=="], - - "@napi-rs/nice-android-arm64": ["@napi-rs/nice-android-arm64@1.0.1", "", { "os": "android", "cpu": "arm64" }, "sha512-GqvXL0P8fZ+mQqG1g0o4AO9hJjQaeYG84FRfZaYjyJtZZZcMjXW5TwkL8Y8UApheJgyE13TQ4YNUssQaTgTyvA=="], - - "@napi-rs/nice-darwin-arm64": ["@napi-rs/nice-darwin-arm64@1.0.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-91k3HEqUl2fsrz/sKkuEkscj6EAj3/eZNCLqzD2AA0TtVbkQi8nqxZCZDMkfklULmxLkMxuUdKe7RvG/T6s2AA=="], - - "@napi-rs/nice-darwin-x64": ["@napi-rs/nice-darwin-x64@1.0.1", "", { 
"os": "darwin", "cpu": "x64" }, "sha512-jXnMleYSIR/+TAN/p5u+NkCA7yidgswx5ftqzXdD5wgy/hNR92oerTXHc0jrlBisbd7DpzoaGY4cFD7Sm5GlgQ=="], - - "@napi-rs/nice-freebsd-x64": ["@napi-rs/nice-freebsd-x64@1.0.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-j+iJ/ezONXRQsVIB/FJfwjeQXX7A2tf3gEXs4WUGFrJjpe/z2KB7sOv6zpkm08PofF36C9S7wTNuzHZ/Iiccfw=="], - - "@napi-rs/nice-linux-arm-gnueabihf": ["@napi-rs/nice-linux-arm-gnueabihf@1.0.1", "", { "os": "linux", "cpu": "arm" }, "sha512-G8RgJ8FYXYkkSGQwywAUh84m946UTn6l03/vmEXBYNJxQJcD+I3B3k5jmjFG/OPiU8DfvxutOP8bi+F89MCV7Q=="], - - "@napi-rs/nice-linux-arm64-gnu": ["@napi-rs/nice-linux-arm64-gnu@1.0.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-IMDak59/W5JSab1oZvmNbrms3mHqcreaCeClUjwlwDr0m3BoR09ZiN8cKFBzuSlXgRdZ4PNqCYNeGQv7YMTjuA=="], - - "@napi-rs/nice-linux-arm64-musl": ["@napi-rs/nice-linux-arm64-musl@1.0.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-wG8fa2VKuWM4CfjOjjRX9YLIbysSVV1S3Kgm2Fnc67ap/soHBeYZa6AGMeR5BJAylYRjnoVOzV19Cmkco3QEPw=="], - - "@napi-rs/nice-linux-ppc64-gnu": ["@napi-rs/nice-linux-ppc64-gnu@1.0.1", "", { "os": "linux", "cpu": "ppc64" }, "sha512-lxQ9WrBf0IlNTCA9oS2jg/iAjQyTI6JHzABV664LLrLA/SIdD+I1i3Mjf7TsnoUbgopBcCuDztVLfJ0q9ubf6Q=="], - - "@napi-rs/nice-linux-riscv64-gnu": ["@napi-rs/nice-linux-riscv64-gnu@1.0.1", "", { "os": "linux", "cpu": "none" }, "sha512-3xs69dO8WSWBb13KBVex+yvxmUeEsdWexxibqskzoKaWx9AIqkMbWmE2npkazJoopPKX2ULKd8Fm9veEn0g4Ig=="], - - "@napi-rs/nice-linux-s390x-gnu": ["@napi-rs/nice-linux-s390x-gnu@1.0.1", "", { "os": "linux", "cpu": "s390x" }, "sha512-lMFI3i9rlW7hgToyAzTaEybQYGbQHDrpRkg+1gJWEpH0PLAQoZ8jiY0IzakLfNWnVda1eTYYlxxFYzW8Rqczkg=="], - - "@napi-rs/nice-linux-x64-gnu": ["@napi-rs/nice-linux-x64-gnu@1.0.1", "", { "os": "linux", "cpu": "x64" }, "sha512-XQAJs7DRN2GpLN6Fb+ZdGFeYZDdGl2Fn3TmFlqEL5JorgWKrQGRUrpGKbgZ25UeZPILuTKJ+OowG2avN8mThBA=="], - - "@napi-rs/nice-linux-x64-musl": ["@napi-rs/nice-linux-x64-musl@1.0.1", "", { "os": "linux", "cpu": "x64" }, 
"sha512-/rodHpRSgiI9o1faq9SZOp/o2QkKQg7T+DK0R5AkbnI/YxvAIEHf2cngjYzLMQSQgUhxym+LFr+UGZx4vK4QdQ=="], - - "@napi-rs/nice-win32-arm64-msvc": ["@napi-rs/nice-win32-arm64-msvc@1.0.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-rEcz9vZymaCB3OqEXoHnp9YViLct8ugF+6uO5McifTedjq4QMQs3DHz35xBEGhH3gJWEsXMUbzazkz5KNM5YUg=="], - - "@napi-rs/nice-win32-ia32-msvc": ["@napi-rs/nice-win32-ia32-msvc@1.0.1", "", { "os": "win32", "cpu": "ia32" }, "sha512-t7eBAyPUrWL8su3gDxw9xxxqNwZzAqKo0Szv3IjVQd1GpXXVkb6vBBQUuxfIYaXMzZLwlxRQ7uzM2vdUE9ULGw=="], - - "@napi-rs/nice-win32-x64-msvc": ["@napi-rs/nice-win32-x64-msvc@1.0.1", "", { "os": "win32", "cpu": "x64" }, "sha512-JlF+uDcatt3St2ntBG8H02F1mM45i5SF9W+bIKiReVE6wiy3o16oBP/yxt+RZ+N6LbCImJXJ6bXNO2kn9AXicg=="], - - "@noble/hashes": ["@noble/hashes@1.8.0", "", {}, "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A=="], - - "@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="], - - "@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="], - - "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="], - - "@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.3" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="], - - "@npmcli/fs": ["@npmcli/fs@4.0.0", "", { "dependencies": { "semver": "^7.3.5" } }, 
"sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q=="], - - "@npmcli/git": ["@npmcli/git@6.0.3", "", { "dependencies": { "@npmcli/promise-spawn": "^8.0.0", "ini": "^5.0.0", "lru-cache": "^10.0.1", "npm-pick-manifest": "^10.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "semver": "^7.3.5", "which": "^5.0.0" } }, "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ=="], - - "@npmcli/installed-package-contents": ["@npmcli/installed-package-contents@3.0.0", "", { "dependencies": { "npm-bundled": "^4.0.0", "npm-normalize-package-bin": "^4.0.0" }, "bin": { "installed-package-contents": "bin/index.js" } }, "sha512-fkxoPuFGvxyrH+OQzyTkX2LUEamrF4jZSmxjAtPPHHGO0dqsQ8tTKjnIS8SAnPHdk2I03BDtSMR5K/4loKg79Q=="], - - "@npmcli/node-gyp": ["@npmcli/node-gyp@4.0.0", "", {}, "sha512-+t5DZ6mO/QFh78PByMq1fGSAub/agLJZDRfJRMeOSNCt8s9YVlTjmGpIPwPhvXTGUIJk+WszlT0rQa1W33yzNA=="], - - "@npmcli/package-json": ["@npmcli/package-json@6.2.0", "", { "dependencies": { "@npmcli/git": "^6.0.0", "glob": "^10.2.2", "hosted-git-info": "^8.0.0", "json-parse-even-better-errors": "^4.0.0", "proc-log": "^5.0.0", "semver": "^7.5.3", "validate-npm-package-license": "^3.0.4" } }, "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA=="], - - "@npmcli/promise-spawn": ["@npmcli/promise-spawn@8.0.2", "", { "dependencies": { "which": "^5.0.0" } }, "sha512-/bNJhjc+o6qL+Dwz/bqfTQClkEO5nTQ1ZEcdCkAQjhkZMHIh22LPG7fNh1enJP1NKWDqYiiABnjFCY7E0zHYtQ=="], - - "@npmcli/redact": ["@npmcli/redact@3.2.2", "", {}, "sha512-7VmYAmk4csGv08QzrDKScdzn11jHPFGyqJW39FyPgPuAp3zIaUmuCo1yxw9aGs+NEJuTGQ9Gwqpt93vtJubucg=="], - - "@npmcli/run-script": ["@npmcli/run-script@9.1.0", "", { "dependencies": { "@npmcli/node-gyp": "^4.0.0", "@npmcli/package-json": "^6.0.0", "@npmcli/promise-spawn": "^8.0.0", "node-gyp": "^11.0.0", "proc-log": "^5.0.0", "which": "^5.0.0" } }, 
"sha512-aoNSbxtkePXUlbZB+anS1LqsJdctG5n3UVhfU47+CDdwMi6uNTBMF9gPcQRnqghQd2FGzcwwIFBruFMxjhBewg=="], - - "@paralleldrive/cuid2": ["@paralleldrive/cuid2@2.2.2", "", { "dependencies": { "@noble/hashes": "^1.1.5" } }, "sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA=="], - - "@parcel/watcher": ["@parcel/watcher@2.5.1", "", { "dependencies": { "detect-libc": "^1.0.3", "is-glob": "^4.0.3", "micromatch": "^4.0.5", "node-addon-api": "^7.0.0" }, "optionalDependencies": { "@parcel/watcher-android-arm64": "2.5.1", "@parcel/watcher-darwin-arm64": "2.5.1", "@parcel/watcher-darwin-x64": "2.5.1", "@parcel/watcher-freebsd-x64": "2.5.1", "@parcel/watcher-linux-arm-glibc": "2.5.1", "@parcel/watcher-linux-arm-musl": "2.5.1", "@parcel/watcher-linux-arm64-glibc": "2.5.1", "@parcel/watcher-linux-arm64-musl": "2.5.1", "@parcel/watcher-linux-x64-glibc": "2.5.1", "@parcel/watcher-linux-x64-musl": "2.5.1", "@parcel/watcher-win32-arm64": "2.5.1", "@parcel/watcher-win32-ia32": "2.5.1", "@parcel/watcher-win32-x64": "2.5.1" } }, "sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg=="], - - "@parcel/watcher-android-arm64": ["@parcel/watcher-android-arm64@2.5.1", "", { "os": "android", "cpu": "arm64" }, "sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA=="], - - "@parcel/watcher-darwin-arm64": ["@parcel/watcher-darwin-arm64@2.5.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw=="], - - "@parcel/watcher-darwin-x64": ["@parcel/watcher-darwin-x64@2.5.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg=="], - - "@parcel/watcher-freebsd-x64": ["@parcel/watcher-freebsd-x64@2.5.1", "", { "os": "freebsd", "cpu": "x64" }, 
"sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ=="], - - "@parcel/watcher-linux-arm-glibc": ["@parcel/watcher-linux-arm-glibc@2.5.1", "", { "os": "linux", "cpu": "arm" }, "sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA=="], - - "@parcel/watcher-linux-arm-musl": ["@parcel/watcher-linux-arm-musl@2.5.1", "", { "os": "linux", "cpu": "arm" }, "sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q=="], - - "@parcel/watcher-linux-arm64-glibc": ["@parcel/watcher-linux-arm64-glibc@2.5.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w=="], - - "@parcel/watcher-linux-arm64-musl": ["@parcel/watcher-linux-arm64-musl@2.5.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg=="], - - "@parcel/watcher-linux-x64-glibc": ["@parcel/watcher-linux-x64-glibc@2.5.1", "", { "os": "linux", "cpu": "x64" }, "sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A=="], - - "@parcel/watcher-linux-x64-musl": ["@parcel/watcher-linux-x64-musl@2.5.1", "", { "os": "linux", "cpu": "x64" }, "sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg=="], - - "@parcel/watcher-win32-arm64": ["@parcel/watcher-win32-arm64@2.5.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw=="], - - "@parcel/watcher-win32-ia32": ["@parcel/watcher-win32-ia32@2.5.1", "", { "os": "win32", "cpu": "ia32" }, "sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ=="], - - "@parcel/watcher-win32-x64": ["@parcel/watcher-win32-x64@2.5.1", "", { "os": "win32", "cpu": "x64" }, 
"sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA=="], - - "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="], - - "@protobufjs/aspromise": ["@protobufjs/aspromise@1.1.2", "", {}, "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="], - - "@protobufjs/base64": ["@protobufjs/base64@1.1.2", "", {}, "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="], - - "@protobufjs/codegen": ["@protobufjs/codegen@2.0.4", "", {}, "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="], - - "@protobufjs/eventemitter": ["@protobufjs/eventemitter@1.1.0", "", {}, "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="], - - "@protobufjs/fetch": ["@protobufjs/fetch@1.1.0", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" } }, "sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ=="], - - "@protobufjs/float": ["@protobufjs/float@1.0.2", "", {}, "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="], - - "@protobufjs/inquire": ["@protobufjs/inquire@1.1.0", "", {}, "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="], - - "@protobufjs/path": ["@protobufjs/path@1.1.2", "", {}, "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="], - - "@protobufjs/pool": ["@protobufjs/pool@1.1.0", "", {}, "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="], - - "@protobufjs/utf8": ["@protobufjs/utf8@1.1.0", "", {}, "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="], - - 
"@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.40.2", "", { "os": "android", "cpu": "arm" }, "sha512-JkdNEq+DFxZfUwxvB58tHMHBHVgX23ew41g1OQinthJ+ryhdRk67O31S7sYw8u2lTjHUPFxwar07BBt1KHp/hg=="], - - "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.40.2", "", { "os": "android", "cpu": "arm64" }, "sha512-13unNoZ8NzUmnndhPTkWPWbX3vtHodYmy+I9kuLxN+F+l+x3LdVF7UCu8TWVMt1POHLh6oDHhnOA04n8oJZhBw=="], - - "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.40.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Gzf1Hn2Aoe8VZzevHostPX23U7N5+4D36WJNHK88NZHCJr7aVMG4fadqkIf72eqVPGjGc0HJHNuUaUcxiR+N/w=="], - - "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.40.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-47N4hxa01a4x6XnJoskMKTS8XZ0CZMd8YTbINbi+w03A2w4j1RTlnGHOz/P0+Bg1LaVL6ufZyNprSg+fW5nYQQ=="], - - "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.40.2", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-8t6aL4MD+rXSHHZUR1z19+9OFJ2rl1wGKvckN47XFRVO+QL/dUSpKA2SLRo4vMg7ELA8pzGpC+W9OEd1Z/ZqoQ=="], - - "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.40.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-C+AyHBzfpsOEYRFjztcYUFsH4S7UsE9cDtHCtma5BK8+ydOZYgMmWg1d/4KBytQspJCld8ZIujFMAdKG1xyr4Q=="], - - "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.40.2", "", { "os": "linux", "cpu": "arm" }, "sha512-de6TFZYIvJwRNjmW3+gaXiZ2DaWL5D5yGmSYzkdzjBDS3W+B9JQ48oZEsmMvemqjtAFzE16DIBLqd6IQQRuG9Q=="], - - "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.40.2", "", { "os": "linux", "cpu": "arm" }, "sha512-urjaEZubdIkacKc930hUDOfQPysezKla/O9qV+O89enqsqUmQm8Xj8O/vh0gHg4LYfv7Y7UsE3QjzLQzDYN1qg=="], - - "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.40.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-KlE8IC0HFOC33taNt1zR8qNlBYHj31qGT1UqWqtvR/+NuCVhfufAq9fxO8BMFC22Wu0rxOwGVWxtCMvZVLmhQg=="], - - "@rollup/rollup-linux-arm64-musl": 
["@rollup/rollup-linux-arm64-musl@4.40.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-j8CgxvfM0kbnhu4XgjnCWJQyyBOeBI1Zq91Z850aUddUmPeQvuAy6OiMdPS46gNFgy8gN1xkYyLgwLYZG3rBOg=="], - - "@rollup/rollup-linux-loongarch64-gnu": ["@rollup/rollup-linux-loongarch64-gnu@4.40.2", "", { "os": "linux", "cpu": "none" }, "sha512-Ybc/1qUampKuRF4tQXc7G7QY9YRyeVSykfK36Y5Qc5dmrIxwFhrOzqaVTNoZygqZ1ZieSWTibfFhQ5qK8jpWxw=="], - - "@rollup/rollup-linux-powerpc64le-gnu": ["@rollup/rollup-linux-powerpc64le-gnu@4.40.2", "", { "os": "linux", "cpu": "ppc64" }, "sha512-3FCIrnrt03CCsZqSYAOW/k9n625pjpuMzVfeI+ZBUSDT3MVIFDSPfSUgIl9FqUftxcUXInvFah79hE1c9abD+Q=="], - - "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.40.2", "", { "os": "linux", "cpu": "none" }, "sha512-QNU7BFHEvHMp2ESSY3SozIkBPaPBDTsfVNGx3Xhv+TdvWXFGOSH2NJvhD1zKAT6AyuuErJgbdvaJhYVhVqrWTg=="], - - "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.40.2", "", { "os": "linux", "cpu": "none" }, "sha512-5W6vNYkhgfh7URiXTO1E9a0cy4fSgfE4+Hl5agb/U1sa0kjOLMLC1wObxwKxecE17j0URxuTrYZZME4/VH57Hg=="], - - "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.40.2", "", { "os": "linux", "cpu": "s390x" }, "sha512-B7LKIz+0+p348JoAL4X/YxGx9zOx3sR+o6Hj15Y3aaApNfAshK8+mWZEf759DXfRLeL2vg5LYJBB7DdcleYCoQ=="], - - "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.40.2", "", { "os": "linux", "cpu": "x64" }, "sha512-lG7Xa+BmBNwpjmVUbmyKxdQJ3Q6whHjMjzQplOs5Z+Gj7mxPtWakGHqzMqNER68G67kmCX9qX57aRsW5V0VOng=="], - - "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.40.2", "", { "os": "linux", "cpu": "x64" }, "sha512-tD46wKHd+KJvsmije4bUskNuvWKFcTOIM9tZ/RrmIvcXnbi0YK/cKS9FzFtAm7Oxi2EhV5N2OpfFB348vSQRXA=="], - - "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.40.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-Bjv/HG8RRWLNkXwQQemdsWw4Mg+IJ29LK+bJPW2SCzPKOUaMmPEppQlu/Fqk1d7+DX3V7JbFdbkh/NMmurT6Pg=="], - - "@rollup/rollup-win32-ia32-msvc": 
["@rollup/rollup-win32-ia32-msvc@4.40.2", "", { "os": "win32", "cpu": "ia32" }, "sha512-dt1llVSGEsGKvzeIO76HToiYPNPYPkmjhMHhP00T9S4rDern8P2ZWvWAQUEJ+R1UdMWJ/42i/QqJ2WV765GZcA=="], - - "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.40.2", "", { "os": "win32", "cpu": "x64" }, "sha512-bwspbWB04XJpeElvsp+DCylKfF4trJDa2Y9Go8O6A7YLX2LIKGcNK/CYImJN6ZP4DcuOHB4Utl3iCbnR62DudA=="], - - "@schematics/angular": ["@schematics/angular@20.0.1", "", { "dependencies": { "@angular-devkit/core": "20.0.1", "@angular-devkit/schematics": "20.0.1", "jsonc-parser": "3.3.1" } }, "sha512-29T9vUAjZnbXM+vImIQcdqG/ibdcfj5+pybo5cbiMSwVPVyerXgnD0HKC4dyZ34V2RFZa8cmyCLe/5bYoPQ+0g=="], - - "@sec-ant/readable-stream": ["@sec-ant/readable-stream@0.4.1", "", {}, "sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg=="], - - "@sigstore/bundle": ["@sigstore/bundle@3.1.0", "", { "dependencies": { "@sigstore/protobuf-specs": "^0.4.0" } }, "sha512-Mm1E3/CmDDCz3nDhFKTuYdB47EdRFRQMOE/EAbiG1MJW77/w1b3P7Qx7JSrVJs8PfwOLOVcKQCHErIwCTyPbag=="], - - "@sigstore/core": ["@sigstore/core@2.0.0", "", {}, "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg=="], - - "@sigstore/protobuf-specs": ["@sigstore/protobuf-specs@0.4.3", "", {}, "sha512-fk2zjD9117RL9BjqEwF7fwv7Q/P9yGsMV4MUJZ/DocaQJ6+3pKr+syBq1owU5Q5qGw5CUbXzm+4yJ2JVRDQeSA=="], - - "@sigstore/sign": ["@sigstore/sign@3.1.0", "", { "dependencies": { "@sigstore/bundle": "^3.1.0", "@sigstore/core": "^2.0.0", "@sigstore/protobuf-specs": "^0.4.0", "make-fetch-happen": "^14.0.2", "proc-log": "^5.0.0", "promise-retry": "^2.0.1" } }, "sha512-knzjmaOHOov1Ur7N/z4B1oPqZ0QX5geUfhrVaqVlu+hl0EAoL4o+l0MSULINcD5GCWe3Z0+YJO8ues6vFlW0Yw=="], - - "@sigstore/tuf": ["@sigstore/tuf@3.1.1", "", { "dependencies": { "@sigstore/protobuf-specs": "^0.4.1", "tuf-js": "^3.0.1" } }, "sha512-eFFvlcBIoGwVkkwmTi/vEQFSva3xs5Ot3WmBcjgjVdiaoelBLQaQ/ZBfhlG0MnG0cmTYScPpk7eDdGDWUcFUmg=="], - - 
"@sigstore/verify": ["@sigstore/verify@2.1.1", "", { "dependencies": { "@sigstore/bundle": "^3.1.0", "@sigstore/core": "^2.0.0", "@sigstore/protobuf-specs": "^0.4.1" } }, "sha512-hVJD77oT67aowHxwT4+M6PGOp+E2LtLdTK3+FC0lBO9T7sYwItDMXZ7Z07IDCvR1M717a4axbIWckrW67KMP/w=="], - - "@sindresorhus/is": ["@sindresorhus/is@7.0.2", "", {}, "sha512-d9xRovfKNz1SKieM0qJdO+PQonjnnIfSNWfHYnBSJ9hkjm0ZPw6HlxscDXYstp3z+7V2GOFHc+J0CYrYTjqCJw=="], - - "@socket.io/component-emitter": ["@socket.io/component-emitter@3.1.2", "", {}, "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA=="], - - "@stock-bot/cache": ["@stock-bot/cache@workspace:libs/cache"], - - "@stock-bot/config": ["@stock-bot/config@workspace:libs/config"], - - "@stock-bot/data-adjustments": ["@stock-bot/data-adjustments@workspace:libs/data-adjustments"], - - "@stock-bot/data-frame": ["@stock-bot/data-frame@workspace:libs/data-frame"], - - "@stock-bot/data-service": ["@stock-bot/data-service@workspace:apps/data-service"], - - "@stock-bot/event-bus": ["@stock-bot/event-bus@workspace:libs/event-bus"], - - "@stock-bot/execution-service": ["@stock-bot/execution-service@workspace:apps/execution-service"], - - "@stock-bot/http": ["@stock-bot/http@workspace:libs/http"], - - "@stock-bot/logger": ["@stock-bot/logger@workspace:libs/logger"], - - "@stock-bot/mongodb-client": ["@stock-bot/mongodb-client@workspace:libs/mongodb-client"], - - "@stock-bot/portfolio-service": ["@stock-bot/portfolio-service@workspace:apps/portfolio-service"], - - "@stock-bot/postgres-client": ["@stock-bot/postgres-client@workspace:libs/postgres-client"], - - "@stock-bot/processing-service": ["@stock-bot/processing-service@workspace:apps/processing-service"], - - "@stock-bot/questdb-client": ["@stock-bot/questdb-client@workspace:libs/questdb-client"], - - "@stock-bot/shutdown": ["@stock-bot/shutdown@workspace:libs/shutdown"], - - "@stock-bot/strategy-engine": 
["@stock-bot/strategy-engine@workspace:libs/strategy-engine"], - - "@stock-bot/strategy-service": ["@stock-bot/strategy-service@workspace:apps/strategy-service"], - - "@stock-bot/types": ["@stock-bot/types@workspace:libs/types"], - - "@stock-bot/utils": ["@stock-bot/utils@workspace:libs/utils"], - - "@stock-bot/vector-engine": ["@stock-bot/vector-engine@workspace:libs/vector-engine"], - - "@szmarczak/http-timer": ["@szmarczak/http-timer@5.0.1", "", { "dependencies": { "defer-to-connect": "^2.0.1" } }, "sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw=="], - - "@tailwindcss/node": ["@tailwindcss/node@4.1.8", "", { "dependencies": { "@ampproject/remapping": "^2.3.0", "enhanced-resolve": "^5.18.1", "jiti": "^2.4.2", "lightningcss": "1.30.1", "magic-string": "^0.30.17", "source-map-js": "^1.2.1", "tailwindcss": "4.1.8" } }, "sha512-OWwBsbC9BFAJelmnNcrKuf+bka2ZxCE2A4Ft53Tkg4uoiE67r/PMEYwCsourC26E+kmxfwE0hVzMdxqeW+xu7Q=="], - - "@tailwindcss/oxide": ["@tailwindcss/oxide@4.1.8", "", { "dependencies": { "detect-libc": "^2.0.4", "tar": "^7.4.3" }, "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.8", "@tailwindcss/oxide-darwin-arm64": "4.1.8", "@tailwindcss/oxide-darwin-x64": "4.1.8", "@tailwindcss/oxide-freebsd-x64": "4.1.8", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.8", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.8", "@tailwindcss/oxide-linux-arm64-musl": "4.1.8", "@tailwindcss/oxide-linux-x64-gnu": "4.1.8", "@tailwindcss/oxide-linux-x64-musl": "4.1.8", "@tailwindcss/oxide-wasm32-wasi": "4.1.8", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.8", "@tailwindcss/oxide-win32-x64-msvc": "4.1.8" } }, "sha512-d7qvv9PsM5N3VNKhwVUhpK6r4h9wtLkJ6lz9ZY9aeZgrUWk1Z8VPyqyDT9MZlem7GTGseRQHkeB1j3tC7W1P+A=="], - - "@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.8", "", { "os": "android", "cpu": "arm64" }, 
"sha512-Fbz7qni62uKYceWYvUjRqhGfZKwhZDQhlrJKGtnZfuNtHFqa8wmr+Wn74CTWERiW2hn3mN5gTpOoxWKk0jRxjg=="], - - "@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.8", "", { "os": "darwin", "cpu": "arm64" }, "sha512-RdRvedGsT0vwVVDztvyXhKpsU2ark/BjgG0huo4+2BluxdXo8NDgzl77qh0T1nUxmM11eXwR8jA39ibvSTbi7A=="], - - "@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.1.8", "", { "os": "darwin", "cpu": "x64" }, "sha512-t6PgxjEMLp5Ovf7uMb2OFmb3kqzVTPPakWpBIFzppk4JE4ix0yEtbtSjPbU8+PZETpaYMtXvss2Sdkx8Vs4XRw=="], - - "@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.8", "", { "os": "freebsd", "cpu": "x64" }, "sha512-g8C8eGEyhHTqwPStSwZNSrOlyx0bhK/V/+zX0Y+n7DoRUzyS8eMbVshVOLJTDDC+Qn9IJnilYbIKzpB9n4aBsg=="], - - "@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.8", "", { "os": "linux", "cpu": "arm" }, "sha512-Jmzr3FA4S2tHhaC6yCjac3rGf7hG9R6Gf2z9i9JFcuyy0u79HfQsh/thifbYTF2ic82KJovKKkIB6Z9TdNhCXQ=="], - - "@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.8", "", { "os": "linux", "cpu": "arm64" }, "sha512-qq7jXtO1+UEtCmCeBBIRDrPFIVI4ilEQ97qgBGdwXAARrUqSn/L9fUrkb1XP/mvVtoVeR2bt/0L77xx53bPZ/Q=="], - - "@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.8", "", { "os": "linux", "cpu": "arm64" }, "sha512-O6b8QesPbJCRshsNApsOIpzKt3ztG35gfX9tEf4arD7mwNinsoCKxkj8TgEE0YRjmjtO3r9FlJnT/ENd9EVefQ=="], - - "@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.8", "", { "os": "linux", "cpu": "x64" }, "sha512-32iEXX/pXwikshNOGnERAFwFSfiltmijMIAbUhnNyjFr3tmWmMJWQKU2vNcFX0DACSXJ3ZWcSkzNbaKTdngH6g=="], - - "@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.8", "", { "os": "linux", "cpu": "x64" }, "sha512-s+VSSD+TfZeMEsCaFaHTaY5YNj3Dri8rST09gMvYQKwPphacRG7wbuQ5ZJMIJXN/puxPcg/nU+ucvWguPpvBDg=="], - - "@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.8", "", { "dependencies": { 
"@emnapi/core": "^1.4.3", "@emnapi/runtime": "^1.4.3", "@emnapi/wasi-threads": "^1.0.2", "@napi-rs/wasm-runtime": "^0.2.10", "@tybys/wasm-util": "^0.9.0", "tslib": "^2.8.0" }, "cpu": "none" }, "sha512-CXBPVFkpDjM67sS1psWohZ6g/2/cd+cq56vPxK4JeawelxwK4YECgl9Y9TjkE2qfF+9/s1tHHJqrC4SS6cVvSg=="], - - "@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.8", "", { "os": "win32", "cpu": "arm64" }, "sha512-7GmYk1n28teDHUjPlIx4Z6Z4hHEgvP5ZW2QS9ygnDAdI/myh3HTHjDqtSqgu1BpRoI4OiLx+fThAyA1JePoENA=="], - - "@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.1.8", "", { "os": "win32", "cpu": "x64" }, "sha512-fou+U20j+Jl0EHwK92spoWISON2OBnCazIc038Xj2TdweYV33ZRkS9nwqiUi2d/Wba5xg5UoHfvynnb/UB49cQ=="], - - "@tailwindcss/postcss": ["@tailwindcss/postcss@4.1.8", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "@tailwindcss/node": "4.1.8", "@tailwindcss/oxide": "4.1.8", "postcss": "^8.4.41", "tailwindcss": "4.1.8" } }, "sha512-vB/vlf7rIky+w94aWMw34bWW1ka6g6C3xIOdICKX2GC0VcLtL6fhlLiafF0DVIwa9V6EHz8kbWMkS2s2QvvNlw=="], - - "@testcontainers/mongodb": ["@testcontainers/mongodb@10.28.0", "", { "dependencies": { "testcontainers": "^10.28.0" } }, "sha512-78h6n2jnFOQ8IfPjgL1+vsHuEeA0itclEOpx9kkQR+FOWnwJN9AeeX6+rMmZCtRgTsr5wT0BvfFoDssMkDqWaQ=="], - - "@testcontainers/postgresql": ["@testcontainers/postgresql@10.28.0", "", { "dependencies": { "testcontainers": "^10.28.0" } }, "sha512-NN25rruG5D4Q7pCNIJuHwB+G85OSeJ3xHZ2fWx0O6sPoPEfCYwvpj8mq99cyn68nxFkFYZeyrZJtSFO+FnydiA=="], - - "@tufjs/canonical-json": ["@tufjs/canonical-json@2.0.0", "", {}, "sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA=="], - - "@tufjs/models": ["@tufjs/models@3.0.1", "", { "dependencies": { "@tufjs/canonical-json": "2.0.0", "minimatch": "^9.0.5" } }, "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA=="], - - "@types/bun": ["@types/bun@1.2.15", "", { "dependencies": { "bun-types": 
"1.2.15" } }, "sha512-U1ljPdBEphF0nw1MIk0hI7kPg7dFdPyM7EenHsp6W5loNHl7zqy6JQf/RKCgnUn2KDzUpkBwHPnEJEjII594bA=="], - - "@types/cookiejar": ["@types/cookiejar@2.1.5", "", {}, "sha512-he+DHOWReW0nghN24E1WUqM0efK4kI9oTqDm6XmK8ZPe2djZ90BSNdGnIyCLzCPw7/pogPlGbzI2wHGGmi4O/Q=="], - - "@types/cors": ["@types/cors@2.8.19", "", { "dependencies": { "@types/node": "*" } }, "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg=="], - - "@types/docker-modem": ["@types/docker-modem@3.0.6", "", { "dependencies": { "@types/node": "*", "@types/ssh2": "*" } }, "sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg=="], - - "@types/dockerode": ["@types/dockerode@3.3.40", "", { "dependencies": { "@types/docker-modem": "*", "@types/node": "*", "@types/ssh2": "*" } }, "sha512-O1ckSFYbcYv/KcnAHMLCnKQYY8/5+6CRzpsOPcQIePHRX2jG4Gmz8uXPMCXIxTGN9OYkE5eox/L67l2sGY1UYg=="], - - "@types/estree": ["@types/estree@1.0.7", "", {}, "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ=="], - - "@types/http-cache-semantics": ["@types/http-cache-semantics@4.0.4", "", {}, "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA=="], - - "@types/jasmine": ["@types/jasmine@5.1.8", "", {}, "sha512-u7/CnvRdh6AaaIzYjCgUuVbREFgulhX05Qtf6ZtW+aOcjCKKVvKgpkPYJBFTZSHtFBYimzU4zP0V2vrEsq9Wcg=="], - - "@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="], - - "@types/methods": ["@types/methods@1.1.4", "", {}, "sha512-ymXWVrDiCxTBE3+RIrrP533E70eA+9qu7zdWoHuOmGujkYtzf4HQF96b8nwHLqhuf4ykX61IGRIB38CC6/sImQ=="], - - "@types/node": ["@types/node@22.15.30", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-6Q7lr06bEHdlfplU6YRbgG1SFBdlsfNC4/lX+SkhiTs0cpJkOElmWls8PxDFv4yY/xKb8Y6SO0OmSX4wgqTZbA=="], - - "@types/pg": ["@types/pg@8.15.4", "", { 
"dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^2.2.0" } }, "sha512-I6UNVBAoYbvuWkkU3oosC8yxqH21f4/Jc4DK71JLG3dT2mdlGe1z+ep/LQGXaKaOgcvUrsQoPRqfgtMcvZiJhg=="], - - "@types/semver": ["@types/semver@7.7.0", "", {}, "sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA=="], - - "@types/ssh2": ["@types/ssh2@0.5.52", "", { "dependencies": { "@types/node": "*", "@types/ssh2-streams": "*" } }, "sha512-lbLLlXxdCZOSJMCInKH2+9V/77ET2J6NPQHpFI0kda61Dd1KglJs+fPQBchizmzYSOJBgdTajhPqBO1xxLywvg=="], - - "@types/ssh2-streams": ["@types/ssh2-streams@0.1.12", "", { "dependencies": { "@types/node": "*" } }, "sha512-Sy8tpEmCce4Tq0oSOYdfqaBpA3hDM8SoxoFh5vzFsu2oL+znzGz8oVWW7xb4K920yYMUY+PIG31qZnFMfPWNCg=="], - - "@types/superagent": ["@types/superagent@8.1.9", "", { "dependencies": { "@types/cookiejar": "^2.1.5", "@types/methods": "^1.1.4", "@types/node": "*", "form-data": "^4.0.0" } }, "sha512-pTVjI73witn+9ILmoJdajHGW2jkSaOzhiFYF1Rd3EQ94kymLqB9PjD9ISg7WaALC7+dCHT0FGe9T2LktLq/3GQ=="], - - "@types/supertest": ["@types/supertest@6.0.3", "", { "dependencies": { "@types/methods": "^1.1.4", "@types/superagent": "^8.1.0" } }, "sha512-8WzXq62EXFhJ7QsH3Ocb/iKQ/Ty9ZVWnVzoTKc9tyyFRRF3a74Tk2+TLFgaFFw364Ere+npzHKEJ6ga2LzIL7w=="], - - "@types/webidl-conversions": ["@types/webidl-conversions@7.0.3", "", {}, "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA=="], - - "@types/whatwg-url": ["@types/whatwg-url@11.0.5", "", { "dependencies": { "@types/webidl-conversions": "*" } }, "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ=="], - - "@types/ws": ["@types/ws@8.18.1", "", { "dependencies": { "@types/node": "*" } }, "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg=="], - - "@types/yup": ["@types/yup@0.32.0", "", { "dependencies": { "yup": "*" } }, 
"sha512-Gr2lllWTDxGVYHgWfL8szjdedERpNgm44L9BDL2cmcHG7Bfd6taEpiW3ayMFLaYvlJr/6bFXDJdh6L406AGlFg=="], - - "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@6.21.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.5.1", "@typescript-eslint/scope-manager": "6.21.0", "@typescript-eslint/type-utils": "6.21.0", "@typescript-eslint/utils": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", "natural-compare": "^1.4.0", "semver": "^7.5.4", "ts-api-utils": "^1.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^6.0.0 || ^6.0.0-alpha", "eslint": "^7.0.0 || ^8.0.0" } }, "sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA=="], - - "@typescript-eslint/parser": ["@typescript-eslint/parser@6.21.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "6.21.0", "@typescript-eslint/types": "6.21.0", "@typescript-eslint/typescript-estree": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^7.0.0 || ^8.0.0" } }, "sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ=="], - - "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@6.21.0", "", { "dependencies": { "@typescript-eslint/types": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0" } }, "sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg=="], - - "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@6.21.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "6.21.0", "@typescript-eslint/utils": "6.21.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, "peerDependencies": { "eslint": "^7.0.0 || ^8.0.0" } }, "sha512-rZQI7wHfao8qMX3Rd3xqeYSMCL3SoiSQLBATSiVKARdFGCYSRvmViieZjqc58jKgs8Y8i9YvVVhRbHSTA4VBag=="], - - "@typescript-eslint/types": ["@typescript-eslint/types@6.21.0", "", {}, 
"sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg=="], - - "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@6.21.0", "", { "dependencies": { "@typescript-eslint/types": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", "minimatch": "9.0.3", "semver": "^7.5.4", "ts-api-utils": "^1.0.1" } }, "sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ=="], - - "@typescript-eslint/utils": ["@typescript-eslint/utils@6.21.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", "@typescript-eslint/scope-manager": "6.21.0", "@typescript-eslint/types": "6.21.0", "@typescript-eslint/typescript-estree": "6.21.0", "semver": "^7.5.4" }, "peerDependencies": { "eslint": "^7.0.0 || ^8.0.0" } }, "sha512-NfWVaC8HP9T8cbKQxHcsJBY5YE1O33+jpMwN45qzWWaPDZgLIbo12toGMWnmhvCpd3sIxkpDw3Wv1B3dYrbDQQ=="], - - "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@6.21.0", "", { "dependencies": { "@typescript-eslint/types": "6.21.0", "eslint-visitor-keys": "^3.4.1" } }, "sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A=="], - - "@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="], - - "@vitejs/plugin-basic-ssl": ["@vitejs/plugin-basic-ssl@2.0.0", "", { "peerDependencies": { "vite": "^6.0.0" } }, "sha512-gc9Tjg8bUxBVSTzeWT3Njc0Cl3PakHFKdNfABnZWiUgbxqmHDEn7uECv3fHVylxoYgNzAcmU7ZrILz+BwSo3sA=="], - - "@yarnpkg/lockfile": ["@yarnpkg/lockfile@1.1.0", "", {}, "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ=="], - - "abbrev": ["abbrev@3.0.1", "", {}, 
"sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg=="], - - "abort-controller": ["abort-controller@3.0.0", "", { "dependencies": { "event-target-shim": "^5.0.0" } }, "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg=="], - - "accepts": ["accepts@1.3.8", "", { "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw=="], - - "acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="], - - "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="], - - "agent-base": ["agent-base@7.1.3", "", {}, "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw=="], - - "ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="], - - "ajv-formats": ["ajv-formats@3.0.1", "", { "dependencies": { "ajv": "^8.0.0" } }, "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ=="], - - "ansi-escapes": ["ansi-escapes@4.3.2", "", { "dependencies": { "type-fest": "^0.21.3" } }, "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ=="], - - "ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], - - "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], - - "anymatch": ["anymatch@3.1.3", "", { "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="], - - "archiver": ["archiver@7.0.1", "", { "dependencies": { "archiver-utils": "^5.0.2", "async": "^3.2.4", "buffer-crc32": "^1.0.0", "readable-stream": "^4.0.0", "readdir-glob": "^1.1.2", "tar-stream": "^3.0.0", "zip-stream": "^6.0.1" } }, "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ=="], - - "archiver-utils": ["archiver-utils@5.0.2", "", { "dependencies": { "glob": "^10.0.0", "graceful-fs": "^4.2.0", "is-stream": "^2.0.1", "lazystream": "^1.0.0", "lodash": "^4.17.15", "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" } }, "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA=="], - - "argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], - - "array-union": ["array-union@2.1.0", "", {}, "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw=="], - - "asap": ["asap@2.0.6", "", {}, "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA=="], - - "asn1": ["asn1@0.2.6", "", { "dependencies": { "safer-buffer": "~2.1.0" } }, "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ=="], - - "async": ["async@3.2.6", "", {}, "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA=="], - - "async-lock": ["async-lock@1.4.1", "", {}, "sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ=="], - - "async-mutex": ["async-mutex@0.4.1", "", { "dependencies": { "tslib": "^2.4.0" } }, 
"sha512-WfoBo4E/TbCX1G95XTjbWTE3X2XLG0m1Xbv2cwOtuPdyH9CZvnaA5nCt1ucjaKEgW2A5IF71hxrRhr83Je5xjA=="], - - "asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="], - - "atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="], - - "autoprefixer": ["autoprefixer@10.4.21", "", { "dependencies": { "browserslist": "^4.24.4", "caniuse-lite": "^1.0.30001702", "fraction.js": "^4.3.7", "normalize-range": "^0.1.2", "picocolors": "^1.1.1", "postcss-value-parser": "^4.2.0" }, "peerDependencies": { "postcss": "^8.1.0" }, "bin": { "autoprefixer": "bin/autoprefixer" } }, "sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ=="], - - "axios": ["axios@1.9.0", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, "sha512-re4CqKTJaURpzbLHtIi6XpDv20/CnpXOtjRY5/CU32L8gU8ek9UIivcfvSWvmKEngmVbrUtPpdDwWDWL7DNHvg=="], - - "b4a": ["b4a@1.6.7", "", {}, "sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg=="], - - "balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], - - "bare-events": ["bare-events@2.5.4", "", {}, "sha512-+gFfDkR8pj4/TrWCGUGWmJIkBwuxPS5F+a5yWjOHQt2hHvNZd5YLzadjmDUtFmMM4y429bnKLa8bYBMHcYdnQA=="], - - "bare-fs": ["bare-fs@4.1.5", "", { "dependencies": { "bare-events": "^2.5.4", "bare-path": "^3.0.0", "bare-stream": "^2.6.4" }, "peerDependencies": { "bare-buffer": "*" }, "optionalPeers": ["bare-buffer"] }, "sha512-1zccWBMypln0jEE05LzZt+V/8y8AQsQQqxtklqaIyg5nu6OAYFhZxPXinJTSG+kU5qyNmeLgcn9AW7eHiCHVLA=="], - - "bare-os": ["bare-os@3.6.1", "", {}, "sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g=="], - - "bare-path": ["bare-path@3.0.0", 
"", { "dependencies": { "bare-os": "^3.0.1" } }, "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw=="], - - "bare-stream": ["bare-stream@2.6.5", "", { "dependencies": { "streamx": "^2.21.0" }, "peerDependencies": { "bare-buffer": "*", "bare-events": "*" }, "optionalPeers": ["bare-buffer", "bare-events"] }, "sha512-jSmxKJNJmHySi6hC42zlZnq00rga4jjxcgNZjY9N5WlOe/iOoGRtdwGsHzQv2RlH2KOYMwGUXhf2zXd32BA9RA=="], - - "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], - - "base64id": ["base64id@2.0.0", "", {}, "sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog=="], - - "bcrypt-pbkdf": ["bcrypt-pbkdf@1.0.2", "", { "dependencies": { "tweetnacl": "^0.14.3" } }, "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w=="], - - "beasties": ["beasties@0.3.4", "", { "dependencies": { "css-select": "^5.1.0", "css-what": "^6.1.0", "dom-serializer": "^2.0.0", "domhandler": "^5.0.3", "htmlparser2": "^10.0.0", "picocolors": "^1.1.1", "postcss": "^8.4.49", "postcss-media-query-parser": "^0.2.3" } }, "sha512-NmzN1zN1cvGccXFyZ73335+ASXwBlVWcUPssiUDIlFdfyatHPRRufjCd5w8oPaQPvVnf9ELklaCGb1gi9FBwIw=="], - - "binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="], - - "bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="], - - "body-parser": ["body-parser@1.20.3", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": 
"1.0.0" } }, "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g=="], - - "boolbase": ["boolbase@1.0.0", "", {}, "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww=="], - - "brace-expansion": ["brace-expansion@1.1.11", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="], - - "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], - - "browserslist": ["browserslist@4.25.0", "", { "dependencies": { "caniuse-lite": "^1.0.30001718", "electron-to-chromium": "^1.5.160", "node-releases": "^2.0.19", "update-browserslist-db": "^1.1.3" }, "bin": { "browserslist": "cli.js" } }, "sha512-PJ8gYKeS5e/whHBh8xrwYK+dAvEj7JXtz6uTucnMRB8OiGTsKccFekoRrjajPBHV8oOY+2tI4uxeceSimKwMFA=="], - - "bson": ["bson@6.10.4", "", {}, "sha512-WIsKqkSC0ABoBJuT1LEX+2HEvNmNKKgnTAyd0fL8qzK4SH2i9NXg+t08YtdZp/V9IZ33cxe3iV4yM0qg8lMQng=="], - - "buffer": ["buffer@6.0.3", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.2.1" } }, "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA=="], - - "buffer-crc32": ["buffer-crc32@1.0.0", "", {}, "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w=="], - - "buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="], - - "buildcheck": ["buildcheck@0.0.6", "", {}, "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A=="], - - "bullmq": ["bullmq@5.53.2", "", { "dependencies": { "cron-parser": "^4.9.0", "ioredis": "^5.4.1", "msgpackr": "^1.11.2", "node-abort-controller": "^3.1.1", "semver": "^7.5.4", "tslib": "^2.0.0", 
"uuid": "^9.0.0" } }, "sha512-xHgxrP/yNJHD7VCw1h+eRBh+2TCPBCM39uC9gCyksYc6ufcJP+HTZ/A2lzB2x7qMFWrvsX7tM40AT2BmdkYL/Q=="], - - "bun-types": ["bun-types@1.2.15", "", { "dependencies": { "@types/node": "*" } }, "sha512-NarRIaS+iOaQU1JPfyKhZm4AsUOrwUOqRNHY0XxI8GI8jYxiLXLcdjYMG9UKS+fwWasc1uw1htV9AX24dD+p4w=="], - - "byline": ["byline@5.0.0", "", {}, "sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q=="], - - "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], - - "cacache": ["cacache@19.0.1", "", { "dependencies": { "@npmcli/fs": "^4.0.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^10.0.1", "minipass": "^7.0.3", "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "p-map": "^7.0.2", "ssri": "^12.0.0", "tar": "^7.4.3", "unique-filename": "^4.0.0" } }, "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ=="], - - "cacheable-lookup": ["cacheable-lookup@7.0.0", "", {}, "sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w=="], - - "cacheable-request": ["cacheable-request@12.0.1", "", { "dependencies": { "@types/http-cache-semantics": "^4.0.4", "get-stream": "^9.0.1", "http-cache-semantics": "^4.1.1", "keyv": "^4.5.4", "mimic-response": "^4.0.0", "normalize-url": "^8.0.1", "responselike": "^3.0.0" } }, "sha512-Yo9wGIQUaAfIbk+qY0X4cDQgCosecfBe3V9NSyeY4qPC2SAkbCS4Xj79VP8WOzitpJUZKc/wsRCYF5ariDIwkg=="], - - "call-bind": ["call-bind@1.0.8", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="], - - "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": 
"^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="], - - "call-bound": ["call-bound@1.0.4", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "get-intrinsic": "^1.3.0" } }, "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg=="], - - "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], - - "camelcase": ["camelcase@6.3.0", "", {}, "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA=="], - - "caniuse-lite": ["caniuse-lite@1.0.30001721", "", {}, "sha512-cOuvmUVtKrtEaoKiO0rSc29jcjwMwX5tOHDy4MgVFEWiUXj4uBMJkwI8MDySkgXidpMiHUcviogAvFi4pA2hDQ=="], - - "chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], - - "chardet": ["chardet@0.7.0", "", {}, "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA=="], - - "chokidar": ["chokidar@4.0.3", "", { "dependencies": { "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="], - - "chownr": ["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="], - - "cli-cursor": ["cli-cursor@5.0.0", "", { "dependencies": { "restore-cursor": "^5.0.0" } }, "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw=="], - - "cli-spinners": ["cli-spinners@2.9.2", "", {}, "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg=="], - - "cli-truncate": ["cli-truncate@4.0.0", "", { "dependencies": { "slice-ansi": "^5.0.0", "string-width": "^7.0.0" } }, 
"sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA=="], - - "cli-width": ["cli-width@4.1.0", "", {}, "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ=="], - - "cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="], - - "cluster-key-slot": ["cluster-key-slot@1.1.2", "", {}, "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA=="], - - "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], - - "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="], - - "colorette": ["colorette@2.0.20", "", {}, "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="], - - "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="], - - "commander": ["commander@14.0.0", "", {}, "sha512-2uM9rYjPvyq39NwLRqaiLtWHyDC1FvryJDa2ATTVims5YAS4PupsEQsDvP14FqhFr0P49CYDugi59xaxJlTXRA=="], - - "commondir": ["commondir@1.0.1", "", {}, "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg=="], - - "component-emitter": ["component-emitter@1.3.1", "", {}, "sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ=="], - - "compress-commons": ["compress-commons@6.0.2", "", { "dependencies": { "crc-32": "^1.2.0", "crc32-stream": "^6.0.0", "is-stream": "^2.0.1", "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" } }, 
"sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg=="], - - "concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="], - - "connect": ["connect@3.7.0", "", { "dependencies": { "debug": "2.6.9", "finalhandler": "1.1.2", "parseurl": "~1.3.3", "utils-merge": "1.0.1" } }, "sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ=="], - - "content-type": ["content-type@1.0.5", "", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="], - - "convert-source-map": ["convert-source-map@1.9.0", "", {}, "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="], - - "cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], - - "cookiejar": ["cookiejar@2.1.4", "", {}, "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw=="], - - "core-util-is": ["core-util-is@1.0.3", "", {}, "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="], - - "cors": ["cors@2.8.5", "", { "dependencies": { "object-assign": "^4", "vary": "^1" } }, "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g=="], - - "cpu-features": ["cpu-features@0.0.10", "", { "dependencies": { "buildcheck": "~0.0.6", "nan": "^2.19.0" } }, "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA=="], - - "crc-32": ["crc-32@1.2.2", "", { "bin": { "crc32": "bin/crc32.njs" } }, "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ=="], - - "crc32-stream": ["crc32-stream@6.0.0", "", { "dependencies": { "crc-32": "^1.2.0", "readable-stream": "^4.0.0" } }, 
"sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g=="], - - "cron-parser": ["cron-parser@4.9.0", "", { "dependencies": { "luxon": "^3.2.1" } }, "sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q=="], - - "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], - - "css-select": ["css-select@5.1.0", "", { "dependencies": { "boolbase": "^1.0.0", "css-what": "^6.1.0", "domhandler": "^5.0.2", "domutils": "^3.0.1", "nth-check": "^2.0.1" } }, "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg=="], - - "css-what": ["css-what@6.1.0", "", {}, "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw=="], - - "custom-event": ["custom-event@1.0.1", "", {}, "sha512-GAj5FOq0Hd+RsCGVJxZuKaIDXDf3h6GQoNEjFgbLLI/trgtavwUbSnZ5pVfg27DVCaWjIohryS0JFwIJyT2cMg=="], - - "date-fns": ["date-fns@2.30.0", "", { "dependencies": { "@babel/runtime": "^7.21.0" } }, "sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw=="], - - "date-format": ["date-format@4.0.14", "", {}, "sha512-39BOQLs9ZjKh0/patS9nrT8wc3ioX3/eA/zgbKNopnF2wCqJEoxywwwElATYvRsXdnOxA/OQeQoFZ3rFjVajhg=="], - - "dateformat": ["dateformat@4.6.3", "", {}, "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA=="], - - "debug": ["debug@4.4.1", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="], - - "decompress-response": ["decompress-response@6.0.0", "", { "dependencies": { "mimic-response": "^3.1.0" } }, "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ=="], - - "deep-is": 
["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="], - - "defer-to-connect": ["defer-to-connect@2.0.1", "", {}, "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg=="], - - "define-data-property": ["define-data-property@1.1.4", "", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.0.1" } }, "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A=="], - - "delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="], - - "denque": ["denque@2.1.0", "", {}, "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw=="], - - "depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], - - "destroy": ["destroy@1.2.0", "", {}, "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="], - - "detect-libc": ["detect-libc@2.0.4", "", {}, "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="], - - "dezalgo": ["dezalgo@1.0.4", "", { "dependencies": { "asap": "^2.0.0", "wrappy": "1" } }, "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig=="], - - "di": ["di@0.0.1", "", {}, "sha512-uJaamHkagcZtHPqCIHZxnFrXlunQXgBOsZSUOWwFw31QJCAbyTBoHMW75YOTur5ZNx8pIeAKgf6GWIgaqqiLhA=="], - - "dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="], - - "discontinuous-range": ["discontinuous-range@1.0.0", "", {}, "sha512-c68LpLbO+7kP/b1Hr1qs8/BJ09F5khZGTxqxZuhzxpmwJKOgRFHJWIb9/KmqnqHhLdO55aOxFH/EGBvUQbL/RQ=="], - - "docker-compose": ["docker-compose@0.24.8", "", { 
"dependencies": { "yaml": "^2.2.2" } }, "sha512-plizRs/Vf15H+GCVxq2EUvyPK7ei9b/cVesHvjnX4xaXjM9spHe2Ytq0BitndFgvTJ3E3NljPNUEl7BAN43iZw=="], - - "docker-modem": ["docker-modem@5.0.6", "", { "dependencies": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", "ssh2": "^1.15.0" } }, "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ=="], - - "dockerode": ["dockerode@4.0.7", "", { "dependencies": { "@balena/dockerignore": "^1.0.2", "@grpc/grpc-js": "^1.11.1", "@grpc/proto-loader": "^0.7.13", "docker-modem": "^5.0.6", "protobufjs": "^7.3.2", "tar-fs": "~2.1.2", "uuid": "^10.0.0" } }, "sha512-R+rgrSRTRdU5mH14PZTCPZtW/zw3HDWNTS/1ZAQpL/5Upe/ye5K9WQkIysu4wBoiMwKynsz0a8qWuGsHgEvSAA=="], - - "doctrine": ["doctrine@3.0.0", "", { "dependencies": { "esutils": "^2.0.2" } }, "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w=="], - - "dom-serialize": ["dom-serialize@2.2.1", "", { "dependencies": { "custom-event": "~1.0.0", "ent": "~2.2.0", "extend": "^3.0.0", "void-elements": "^2.0.0" } }, "sha512-Yra4DbvoW7/Z6LBN560ZwXMjoNOSAN2wRsKFGc4iBeso+mpIA6qj1vfdf9HpMaKAqG6wXTy+1SYEzmNpKXOSsQ=="], - - "dom-serializer": ["dom-serializer@2.0.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.2", "entities": "^4.2.0" } }, "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg=="], - - "domelementtype": ["domelementtype@2.3.0", "", {}, "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw=="], - - "domhandler": ["domhandler@5.0.3", "", { "dependencies": { "domelementtype": "^2.3.0" } }, "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w=="], - - "domutils": ["domutils@3.2.2", "", { "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3" } }, 
"sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw=="], - - "dotenv": ["dotenv@16.5.0", "", {}, "sha512-m/C+AwOAr9/W1UOIZUo232ejMNnJAJtYQjUbHoNTBNTJSvqzzDh7vnrei3o3r3m9blf6ZoDkvcw0VmozNRFJxg=="], - - "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], - - "eastasianwidth": ["eastasianwidth@0.2.0", "", {}, "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="], - - "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], - - "electron-to-chromium": ["electron-to-chromium@1.5.166", "", {}, "sha512-QPWqHL0BglzPYyJJ1zSSmwFFL6MFXhbACOCcsCdUMCkzPdS9/OIBVxg516X/Ado2qwAq8k0nJJ7phQPCqiaFAw=="], - - "emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], - - "encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="], - - "encoding": ["encoding@0.1.13", "", { "dependencies": { "iconv-lite": "^0.6.2" } }, "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A=="], - - "end-of-stream": ["end-of-stream@1.4.4", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q=="], - - "engine.io": ["engine.io@6.6.4", "", { "dependencies": { "@types/cors": "^2.8.12", "@types/node": ">=10.0.0", "accepts": "~1.3.4", "base64id": "2.0.0", "cookie": "~0.7.2", "cors": "~2.8.5", "debug": "~4.3.1", "engine.io-parser": "~5.2.1", "ws": "~8.17.1" } }, "sha512-ZCkIjSYNDyGn0R6ewHDtXgns/Zre/NT6Agvq1/WobF7JXgFff4SeDroKiCO3fNJreU9YG429Sc81o4w5ok/W5g=="], - - 
"engine.io-parser": ["engine.io-parser@5.2.3", "", {}, "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q=="], - - "enhanced-resolve": ["enhanced-resolve@5.18.1", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg=="], - - "ent": ["ent@2.2.2", "", { "dependencies": { "call-bound": "^1.0.3", "es-errors": "^1.3.0", "punycode": "^1.4.1", "safe-regex-test": "^1.1.0" } }, "sha512-kKvD1tO6BM+oK9HzCPpUdRb4vKFQY/FPTFmurMvh6LlN68VMrdj77w8yp51/kDbpkFOS9J8w5W6zIzgM2H8/hw=="], - - "entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="], - - "env-paths": ["env-paths@2.2.1", "", {}, "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A=="], - - "environment": ["environment@1.1.0", "", {}, "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q=="], - - "err-code": ["err-code@2.0.3", "", {}, "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA=="], - - "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], - - "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], - - "es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="], - - "es-set-tostringtag": ["es-set-tostringtag@2.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, 
"sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA=="], - - "esbuild": ["esbuild@0.25.5", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.5", "@esbuild/android-arm": "0.25.5", "@esbuild/android-arm64": "0.25.5", "@esbuild/android-x64": "0.25.5", "@esbuild/darwin-arm64": "0.25.5", "@esbuild/darwin-x64": "0.25.5", "@esbuild/freebsd-arm64": "0.25.5", "@esbuild/freebsd-x64": "0.25.5", "@esbuild/linux-arm": "0.25.5", "@esbuild/linux-arm64": "0.25.5", "@esbuild/linux-ia32": "0.25.5", "@esbuild/linux-loong64": "0.25.5", "@esbuild/linux-mips64el": "0.25.5", "@esbuild/linux-ppc64": "0.25.5", "@esbuild/linux-riscv64": "0.25.5", "@esbuild/linux-s390x": "0.25.5", "@esbuild/linux-x64": "0.25.5", "@esbuild/netbsd-arm64": "0.25.5", "@esbuild/netbsd-x64": "0.25.5", "@esbuild/openbsd-arm64": "0.25.5", "@esbuild/openbsd-x64": "0.25.5", "@esbuild/sunos-x64": "0.25.5", "@esbuild/win32-arm64": "0.25.5", "@esbuild/win32-ia32": "0.25.5", "@esbuild/win32-x64": "0.25.5" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ=="], - - "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="], - - "escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="], - - "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], - - "eslint": ["eslint@8.57.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", "@eslint/js": "8.57.1", "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", 
"cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", "eslint-scope": "^7.2.2", "eslint-visitor-keys": "^3.4.3", "espree": "^9.6.1", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "globals": "^13.19.0", "graphemer": "^1.4.0", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3", "strip-ansi": "^6.0.1", "text-table": "^0.2.0" }, "bin": { "eslint": "bin/eslint.js" } }, "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA=="], - - "eslint-scope": ["eslint-scope@7.2.2", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg=="], - - "eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], - - "espree": ["espree@9.6.1", "", { "dependencies": { "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", "eslint-visitor-keys": "^3.4.1" } }, "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ=="], - - "esquery": ["esquery@1.6.0", "", { "dependencies": { "estraverse": "^5.1.0" } }, "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg=="], - - "esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="], - - "estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="], - - 
"esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="], - - "event-target-shim": ["event-target-shim@5.0.1", "", {}, "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ=="], - - "eventemitter3": ["eventemitter3@5.0.1", "", {}, "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA=="], - - "events": ["events@3.3.0", "", {}, "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q=="], - - "exponential-backoff": ["exponential-backoff@3.1.2", "", {}, "sha512-8QxYTVXUkuy7fIIoitQkPwGonB8F3Zj8eEO8Sqg9Zv/bkI7RJAzowee4gr81Hak/dUTpA2Z7VfQgoijjPNlUZA=="], - - "extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="], - - "external-editor": ["external-editor@3.1.0", "", { "dependencies": { "chardet": "^0.7.0", "iconv-lite": "^0.4.24", "tmp": "^0.0.33" } }, "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew=="], - - "fast-copy": ["fast-copy@3.0.2", "", {}, "sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ=="], - - "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="], - - "fast-fifo": ["fast-fifo@1.3.2", "", {}, "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ=="], - - "fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="], - - "fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, 
"sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="], - - "fast-levenshtein": ["fast-levenshtein@2.0.6", "", {}, "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="], - - "fast-redact": ["fast-redact@3.5.0", "", {}, "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A=="], - - "fast-safe-stringify": ["fast-safe-stringify@2.1.1", "", {}, "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA=="], - - "fast-uri": ["fast-uri@3.0.6", "", {}, "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw=="], - - "fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="], - - "fdir": ["fdir@6.4.5", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-4BG7puHpVsIYxZUbiUE3RqGloLaSSwzYie5jvasC4LWuBWzZawynvYouhjbQKw2JuIGYdm0DzIxl8iVidKlUEw=="], - - "file-entry-cache": ["file-entry-cache@6.0.1", "", { "dependencies": { "flat-cache": "^3.0.4" } }, "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg=="], - - "fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="], - - "finalhandler": ["finalhandler@1.1.2", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "on-finished": "~2.3.0", "parseurl": "~1.3.3", "statuses": "~1.5.0", "unpipe": "~1.0.0" } }, "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA=="], - - "find-cache-dir": ["find-cache-dir@3.3.2", "", { "dependencies": { "commondir": "^1.0.1", "make-dir": "^3.0.2", "pkg-dir": "^4.1.0" } }, 
"sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig=="], - - "find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="], - - "flat-cache": ["flat-cache@3.2.0", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.3", "rimraf": "^3.0.2" } }, "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw=="], - - "flatted": ["flatted@3.3.3", "", {}, "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg=="], - - "follow-redirects": ["follow-redirects@1.15.9", "", {}, "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ=="], - - "foreground-child": ["foreground-child@3.3.1", "", { "dependencies": { "cross-spawn": "^7.0.6", "signal-exit": "^4.0.1" } }, "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw=="], - - "form-data": ["form-data@4.0.3", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", "hasown": "^2.0.2", "mime-types": "^2.1.12" } }, "sha512-qsITQPfmvMOSAdeyZ+12I1c+CKSstAFAwu+97zrnWAbIr5u8wfsExUzCesVLC8NgHuRUqNN4Zy6UPWUTRGslcA=="], - - "form-data-encoder": ["form-data-encoder@4.1.0", "", {}, "sha512-G6NsmEW15s0Uw9XnCg+33H3ViYRyiM0hMrMhhqQOR8NFc5GhYrI+6I3u7OTw7b91J2g8rtvMBZJDbcGb2YUniw=="], - - "formidable": ["formidable@2.1.5", "", { "dependencies": { "@paralleldrive/cuid2": "^2.2.2", "dezalgo": "^1.0.4", "once": "^1.4.0", "qs": "^6.11.0" } }, "sha512-Oz5Hwvwak/DCaXVVUtPn4oLMLLy1CdclLKO1LFgU7XzDpVMUU5UjlSLpGMocyQNNk8F6IJW9M/YdooSn2MRI+Q=="], - - "fraction.js": ["fraction.js@4.3.7", "", {}, "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew=="], - - "fs-constants": ["fs-constants@1.0.0", "", {}, 
"sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="], - - "fs-extra": ["fs-extra@8.1.0", "", { "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^4.0.0", "universalify": "^0.1.0" } }, "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g=="], - - "fs-minipass": ["fs-minipass@3.0.3", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw=="], - - "fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="], - - "fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], - - "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], - - "functional-red-black-tree": ["functional-red-black-tree@1.0.1", "", {}, "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g=="], - - "gensync": ["gensync@1.0.0-beta.2", "", {}, "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg=="], - - "get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="], - - "get-east-asian-width": ["get-east-asian-width@1.3.0", "", {}, "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ=="], - - "get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, 
"sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="], - - "get-port": ["get-port@7.1.0", "", {}, "sha512-QB9NKEeDg3xxVwCCwJQ9+xycaz6pBB6iQ76wiWMl1927n0Kir6alPiP+yuiICLLU4jpMe08dXfpebuQppFA2zw=="], - - "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], - - "get-stream": ["get-stream@9.0.1", "", { "dependencies": { "@sec-ant/readable-stream": "^0.4.1", "is-stream": "^4.0.1" } }, "sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA=="], - - "glob": ["glob@7.2.3", "", { "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="], - - "glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="], - - "glob-to-regexp": ["glob-to-regexp@0.4.1", "", {}, "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw=="], - - "globals": ["globals@13.24.0", "", { "dependencies": { "type-fest": "^0.20.2" } }, "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ=="], - - "globby": ["globby@11.1.0", "", { "dependencies": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", "fast-glob": "^3.2.9", "ignore": "^5.2.0", "merge2": "^1.4.1", "slash": "^3.0.0" } }, "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g=="], - - "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], - - "got": ["got@14.4.7", "", { "dependencies": { 
"@sindresorhus/is": "^7.0.1", "@szmarczak/http-timer": "^5.0.1", "cacheable-lookup": "^7.0.0", "cacheable-request": "^12.0.1", "decompress-response": "^6.0.0", "form-data-encoder": "^4.0.2", "http2-wrapper": "^2.2.1", "lowercase-keys": "^3.0.0", "p-cancelable": "^4.0.1", "responselike": "^3.0.0", "type-fest": "^4.26.1" } }, "sha512-DI8zV1231tqiGzOiOzQWDhsBmncFW7oQDH6Zgy6pDPrqJuVZMtoSgPLLsBZQj8Jg4JFfwoOsDA8NGtLQLnIx2g=="], - - "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="], - - "graphemer": ["graphemer@1.4.0", "", {}, "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="], - - "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], - - "has-property-descriptors": ["has-property-descriptors@1.0.2", "", { "dependencies": { "es-define-property": "^1.0.0" } }, "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg=="], - - "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], - - "has-tostringtag": ["has-tostringtag@1.0.2", "", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="], - - "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], - - "help-me": ["help-me@5.0.0", "", {}, "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg=="], - - "hono": ["hono@4.7.11", "", {}, "sha512-rv0JMwC0KALbbmwJDEnxvQCeJh+xbS3KEWW5PC9cMJ08Ur9xgatI0HmtgYZfOdOSOeYsp5LO2cOhdI8cLEbDEQ=="], - - "hosted-git-info": ["hosted-git-info@8.1.0", "", { "dependencies": { "lru-cache": 
"^10.0.1" } }, "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw=="], - - "html-escaper": ["html-escaper@2.0.2", "", {}, "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg=="], - - "htmlparser2": ["htmlparser2@10.0.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.2.1", "entities": "^6.0.0" } }, "sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g=="], - - "http-cache-semantics": ["http-cache-semantics@4.2.0", "", {}, "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ=="], - - "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="], - - "http-proxy": ["http-proxy@1.18.1", "", { "dependencies": { "eventemitter3": "^4.0.0", "follow-redirects": "^1.0.0", "requires-port": "^1.0.0" } }, "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ=="], - - "http-proxy-agent": ["http-proxy-agent@7.0.2", "", { "dependencies": { "agent-base": "^7.1.0", "debug": "^4.3.4" } }, "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="], - - "http2-wrapper": ["http2-wrapper@2.2.1", "", { "dependencies": { "quick-lru": "^5.1.1", "resolve-alpn": "^1.2.0" } }, "sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ=="], - - "https-proxy-agent": ["https-proxy-agent@7.0.6", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "4" } }, "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="], - - "iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, 
"sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="], - - "ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], - - "ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="], - - "ignore-walk": ["ignore-walk@7.0.0", "", { "dependencies": { "minimatch": "^9.0.0" } }, "sha512-T4gbf83A4NH95zvhVYZc+qWocBBGlpzUXLPGurJggw/WIOwicfXJChLDP/iBZnN5WqROSu5Bm3hhle4z8a8YGQ=="], - - "immutable": ["immutable@4.3.7", "", {}, "sha512-1hqclzwYwjRDFLjcFxOM5AYkkG0rpFPpr1RLPMEuGczoS7YA8gLhy8SWXYRAA/XwfEHpfo3cw5JGioS32fnMRw=="], - - "import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="], - - "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], - - "inflight": ["inflight@1.0.6", "", { "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA=="], - - "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], - - "ini": ["ini@5.0.0", "", {}, "sha512-+N0ngpO3e7cRUWOJAS7qw0IZIVc6XPrW4MlFBdD066F2L4k1L6ker3hLqSq7iXxU5tgS4WGkIUElWn5vogAEnw=="], - - "ioredis": ["ioredis@5.6.1", "", { "dependencies": { "@ioredis/commands": "^1.1.1", "cluster-key-slot": "^1.1.0", "debug": "^4.3.4", "denque": "^2.1.0", "lodash.defaults": "^4.2.0", "lodash.isarguments": "^3.1.0", "redis-errors": "^1.2.0", "redis-parser": "^3.0.0", "standard-as-callback": "^2.1.0" } }, "sha512-UxC0Yv1Y4WRJiGQxQkP0hfdL0/5/6YvdfOOClRgJ0qppSarkhneSa6UvkMkms0AkdGimSH3Ikqm+6mkMmX7vGA=="], - - "ip-address": 
["ip-address@9.0.5", "", { "dependencies": { "jsbn": "1.1.0", "sprintf-js": "^1.1.3" } }, "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g=="], - - "is-binary-path": ["is-binary-path@2.1.0", "", { "dependencies": { "binary-extensions": "^2.0.0" } }, "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw=="], - - "is-core-module": ["is-core-module@2.16.1", "", { "dependencies": { "hasown": "^2.0.2" } }, "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w=="], - - "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="], - - "is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="], - - "is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="], - - "is-interactive": ["is-interactive@2.0.0", "", {}, "sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ=="], - - "is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="], - - "is-path-inside": ["is-path-inside@3.0.3", "", {}, "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ=="], - - "is-regex": ["is-regex@1.2.1", "", { "dependencies": { "call-bound": "^1.0.2", "gopd": "^1.2.0", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g=="], - - "is-stream": ["is-stream@4.0.1", "", {}, "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A=="], - - "is-unicode-supported": 
["is-unicode-supported@2.1.0", "", {}, "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ=="], - - "isarray": ["isarray@2.0.5", "", {}, "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw=="], - - "isbinaryfile": ["isbinaryfile@4.0.10", "", {}, "sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw=="], - - "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], - - "istanbul-lib-coverage": ["istanbul-lib-coverage@3.2.2", "", {}, "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg=="], - - "istanbul-lib-instrument": ["istanbul-lib-instrument@6.0.3", "", { "dependencies": { "@babel/core": "^7.23.9", "@babel/parser": "^7.23.9", "@istanbuljs/schema": "^0.1.3", "istanbul-lib-coverage": "^3.2.0", "semver": "^7.5.4" } }, "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q=="], - - "istanbul-lib-report": ["istanbul-lib-report@3.0.1", "", { "dependencies": { "istanbul-lib-coverage": "^3.0.0", "make-dir": "^4.0.0", "supports-color": "^7.1.0" } }, "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw=="], - - "istanbul-lib-source-maps": ["istanbul-lib-source-maps@4.0.1", "", { "dependencies": { "debug": "^4.1.1", "istanbul-lib-coverage": "^3.0.0", "source-map": "^0.6.1" } }, "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw=="], - - "istanbul-reports": ["istanbul-reports@3.1.7", "", { "dependencies": { "html-escaper": "^2.0.0", "istanbul-lib-report": "^3.0.0" } }, "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g=="], - - "jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" 
} }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="], - - "jasmine-core": ["jasmine-core@5.7.1", "", {}, "sha512-QnurrtpKsPoixxG2R3d1xP0St/2kcX5oTZyDyQJMY+Vzi/HUlu1kGm+2V8Tz+9lV991leB1l0xcsyz40s9xOOw=="], - - "jiti": ["jiti@2.4.2", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A=="], - - "joycon": ["joycon@3.1.1", "", {}, "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw=="], - - "js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="], - - "js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="], - - "jsbn": ["jsbn@1.1.0", "", {}, "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A=="], - - "jsesc": ["jsesc@3.1.0", "", { "bin": { "jsesc": "bin/jsesc" } }, "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA=="], - - "json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="], - - "json-parse-even-better-errors": ["json-parse-even-better-errors@4.0.0", "", {}, "sha512-lR4MXjGNgkJc7tkQ97kb2nuEMnNCyU//XYVH0MKTGcXEiSudQ5MKGKen3C5QubYy0vmq+JGitUg92uuywGEwIA=="], - - "json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="], - - "json-stable-stringify": ["json-stable-stringify@1.3.0", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.4", "isarray": "^2.0.5", "jsonify": "^0.0.1", "object-keys": "^1.1.1" } }, 
"sha512-qtYiSSFlwot9XHtF9bD9c7rwKjr+RecWT//ZnPvSmEjpV5mmPOCN4j8UjY5hbjNkOwZ/jQv3J6R1/pL7RwgMsg=="], - - "json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="], - - "json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="], - - "jsonc-parser": ["jsonc-parser@3.3.1", "", {}, "sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ=="], - - "jsonfile": ["jsonfile@4.0.0", "", { "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg=="], - - "jsonify": ["jsonify@0.0.1", "", {}, "sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg=="], - - "jsonparse": ["jsonparse@1.3.1", "", {}, "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg=="], - - "karma": ["karma@6.4.4", "", { "dependencies": { "@colors/colors": "1.5.0", "body-parser": "^1.19.0", "braces": "^3.0.2", "chokidar": "^3.5.1", "connect": "^3.7.0", "di": "^0.0.1", "dom-serialize": "^2.2.1", "glob": "^7.1.7", "graceful-fs": "^4.2.6", "http-proxy": "^1.18.1", "isbinaryfile": "^4.0.8", "lodash": "^4.17.21", "log4js": "^6.4.1", "mime": "^2.5.2", "minimatch": "^3.0.4", "mkdirp": "^0.5.5", "qjobs": "^1.2.0", "range-parser": "^1.2.1", "rimraf": "^3.0.2", "socket.io": "^4.7.2", "source-map": "^0.6.1", "tmp": "^0.2.1", "ua-parser-js": "^0.7.30", "yargs": "^16.1.1" }, "bin": { "karma": "bin/karma" } }, "sha512-LrtUxbdvt1gOpo3gxG+VAJlJAEMhbWlM4YrFQgql98FwF7+K8K12LYO4hnDdUkNjeztYrOXEMqgTajSWgmtI/w=="], - - "karma-chrome-launcher": ["karma-chrome-launcher@3.2.0", "", { "dependencies": { "which": "^1.2.1" } }, 
"sha512-rE9RkUPI7I9mAxByQWkGJFXfFD6lE4gC5nPuZdobf/QdTEJI6EU4yIay/cfU/xV4ZxlM5JiTv7zWYgA64NpS5Q=="], - - "karma-coverage": ["karma-coverage@2.2.1", "", { "dependencies": { "istanbul-lib-coverage": "^3.2.0", "istanbul-lib-instrument": "^5.1.0", "istanbul-lib-report": "^3.0.0", "istanbul-lib-source-maps": "^4.0.1", "istanbul-reports": "^3.0.5", "minimatch": "^3.0.4" } }, "sha512-yj7hbequkQP2qOSb20GuNSIyE//PgJWHwC2IydLE6XRtsnaflv+/OSGNssPjobYUlhVVagy99TQpqUt3vAUG7A=="], - - "karma-jasmine": ["karma-jasmine@5.1.0", "", { "dependencies": { "jasmine-core": "^4.1.0" }, "peerDependencies": { "karma": "^6.0.0" } }, "sha512-i/zQLFrfEpRyQoJF9fsCdTMOF5c2dK7C7OmsuKg2D0YSsuZSfQDiLuaiktbuio6F2wiCsZSnSnieIQ0ant/uzQ=="], - - "karma-jasmine-html-reporter": ["karma-jasmine-html-reporter@2.1.0", "", { "peerDependencies": { "jasmine-core": "^4.0.0 || ^5.0.0", "karma": "^6.0.0", "karma-jasmine": "^5.0.0" } }, "sha512-sPQE1+nlsn6Hwb5t+HHwyy0A1FNCVKuL1192b+XNauMYWThz2kweiBVW1DqloRpVvZIJkIoHVB7XRpK78n1xbQ=="], - - "keyv": ["keyv@4.5.4", "", { "dependencies": { "json-buffer": "3.0.1" } }, "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="], - - "lazystream": ["lazystream@1.0.1", "", { "dependencies": { "readable-stream": "^2.0.5" } }, "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw=="], - - "levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="], - - "lightningcss": ["lightningcss@1.30.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-darwin-arm64": "1.30.1", "lightningcss-darwin-x64": "1.30.1", "lightningcss-freebsd-x64": "1.30.1", "lightningcss-linux-arm-gnueabihf": "1.30.1", "lightningcss-linux-arm64-gnu": "1.30.1", "lightningcss-linux-arm64-musl": "1.30.1", "lightningcss-linux-x64-gnu": "1.30.1", 
"lightningcss-linux-x64-musl": "1.30.1", "lightningcss-win32-arm64-msvc": "1.30.1", "lightningcss-win32-x64-msvc": "1.30.1" } }, "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg=="], - - "lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ=="], - - "lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA=="], - - "lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig=="], - - "lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.1", "", { "os": "linux", "cpu": "arm" }, "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q=="], - - "lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw=="], - - "lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ=="], - - "lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw=="], - - "lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ=="], - - "lightningcss-win32-arm64-msvc": 
["lightningcss-win32-arm64-msvc@1.30.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA=="], - - "lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.1", "", { "os": "win32", "cpu": "x64" }, "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg=="], - - "listr2": ["listr2@8.3.3", "", { "dependencies": { "cli-truncate": "^4.0.0", "colorette": "^2.0.20", "eventemitter3": "^5.0.1", "log-update": "^6.1.0", "rfdc": "^1.4.1", "wrap-ansi": "^9.0.0" } }, "sha512-LWzX2KsqcB1wqQ4AHgYb4RsDXauQiqhjLk+6hjbaeHG4zpjjVAB6wC/gz6X0l+Du1cN3pUB5ZlrvTbhGSNnUQQ=="], - - "lmdb": ["lmdb@3.3.0", "", { "dependencies": { "msgpackr": "^1.11.2", "node-addon-api": "^6.1.0", "node-gyp-build-optional-packages": "5.2.2", "ordered-binary": "^1.5.3", "weak-lru-cache": "^1.2.2" }, "optionalDependencies": { "@lmdb/lmdb-darwin-arm64": "3.3.0", "@lmdb/lmdb-darwin-x64": "3.3.0", "@lmdb/lmdb-linux-arm": "3.3.0", "@lmdb/lmdb-linux-arm64": "3.3.0", "@lmdb/lmdb-linux-x64": "3.3.0", "@lmdb/lmdb-win32-arm64": "3.3.0", "@lmdb/lmdb-win32-x64": "3.3.0" }, "bin": { "download-lmdb-prebuilds": "bin/download-prebuilds.js" } }, "sha512-MgJocUI6QEiSXQBFWLeyo1R7eQj8Rke5dlPxX0KFwli8/bsCxpM/KbXO5y0qmV/5llQ3wpneDWcTYxa+4vn8iQ=="], - - "locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="], - - "lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="], - - "lodash.camelcase": ["lodash.camelcase@4.3.0", "", {}, "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="], - - "lodash.defaults": ["lodash.defaults@4.2.0", "", {}, "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ=="], - - 
"lodash.isarguments": ["lodash.isarguments@3.1.0", "", {}, "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg=="], - - "lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="], - - "log-symbols": ["log-symbols@6.0.0", "", { "dependencies": { "chalk": "^5.3.0", "is-unicode-supported": "^1.3.0" } }, "sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw=="], - - "log-update": ["log-update@6.1.0", "", { "dependencies": { "ansi-escapes": "^7.0.0", "cli-cursor": "^5.0.0", "slice-ansi": "^7.1.0", "strip-ansi": "^7.1.0", "wrap-ansi": "^9.0.0" } }, "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w=="], - - "log4js": ["log4js@6.9.1", "", { "dependencies": { "date-format": "^4.0.14", "debug": "^4.3.4", "flatted": "^3.2.7", "rfdc": "^1.3.0", "streamroller": "^3.1.5" } }, "sha512-1somDdy9sChrr9/f4UlzhdaGfDR2c/SaD2a4T7qEkG4jTS57/B3qmnjLYePwQ8cqWnUHZI0iAKxMBpCZICiZ2g=="], - - "long": ["long@5.3.2", "", {}, "sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="], - - "lowercase-keys": ["lowercase-keys@3.0.0", "", {}, "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ=="], - - "lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="], - - "luxon": ["luxon@3.6.1", "", {}, "sha512-tJLxrKJhO2ukZ5z0gyjY1zPh3Rh88Ej9P7jNrZiHMUXHae1yvI2imgOZtL1TO8TW6biMMKfTtAOoEJANgtWBMQ=="], - - "magic-string": ["magic-string@0.30.17", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0" } }, "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA=="], - - "make-dir": ["make-dir@4.0.0", "", { "dependencies": { "semver": "^7.5.3" } }, 
"sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw=="], - - "make-fetch-happen": ["make-fetch-happen@14.0.3", "", { "dependencies": { "@npmcli/agent": "^3.0.0", "cacache": "^19.0.1", "http-cache-semantics": "^4.1.1", "minipass": "^7.0.2", "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^1.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "ssri": "^12.0.0" } }, "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ=="], - - "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], - - "media-typer": ["media-typer@0.3.0", "", {}, "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="], - - "memory-pager": ["memory-pager@1.5.0", "", {}, "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg=="], - - "merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="], - - "methods": ["methods@1.1.2", "", {}, "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="], - - "micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="], - - "mime": ["mime@2.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg=="], - - "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], - - "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, 
"sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], - - "mimic-function": ["mimic-function@5.0.1", "", {}, "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA=="], - - "mimic-response": ["mimic-response@4.0.0", "", {}, "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg=="], - - "minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="], - - "minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="], - - "minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="], - - "minipass-collect": ["minipass-collect@2.0.1", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw=="], - - "minipass-fetch": ["minipass-fetch@4.0.1", "", { "dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^3.0.1" }, "optionalDependencies": { "encoding": "^0.1.13" } }, "sha512-j7U11C5HXigVuutxebFadoYBbd7VSdZWggSe64NVdvWNBqGAiXPL2QVCehjmw7lY1oF9gOllYbORh+hiNgfPgQ=="], - - "minipass-flush": ["minipass-flush@1.0.5", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw=="], - - "minipass-pipeline": ["minipass-pipeline@1.2.4", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A=="], - - "minipass-sized": ["minipass-sized@1.0.3", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g=="], - - 
"minizlib": ["minizlib@3.0.2", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA=="], - - "mkdirp": ["mkdirp@0.5.6", "", { "dependencies": { "minimist": "^1.2.6" }, "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw=="], - - "mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="], - - "moment": ["moment@2.30.1", "", {}, "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how=="], - - "mongodb": ["mongodb@6.17.0", "", { "dependencies": { "@mongodb-js/saslprep": "^1.1.9", "bson": "^6.10.4", "mongodb-connection-string-url": "^3.0.0" }, "peerDependencies": { "@aws-sdk/credential-providers": "^3.188.0", "@mongodb-js/zstd": "^1.1.0 || ^2.0.0", "gcp-metadata": "^5.2.0", "kerberos": "^2.0.1", "mongodb-client-encryption": ">=6.0.0 <7", "snappy": "^7.2.2", "socks": "^2.7.1" }, "optionalPeers": ["@aws-sdk/credential-providers", "@mongodb-js/zstd", "gcp-metadata", "kerberos", "mongodb-client-encryption", "snappy", "socks"] }, "sha512-neerUzg/8U26cgruLysKEjJvoNSXhyID3RvzvdcpsIi2COYM3FS3o9nlH7fxFtefTb942dX3W9i37oPfCVj4wA=="], - - "mongodb-connection-string-url": ["mongodb-connection-string-url@3.0.2", "", { "dependencies": { "@types/whatwg-url": "^11.0.2", "whatwg-url": "^14.1.0 || ^13.0.0" } }, "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA=="], - - "mongodb-memory-server": ["mongodb-memory-server@9.5.0", "", { "dependencies": { "mongodb-memory-server-core": "9.5.0", "tslib": "^2.6.3" } }, "sha512-In3zRT40cLlVtpy7FK6b96Lby6JBAdXj8Kf9YrH4p1Aa2X4ptojq7SmiRR3x47Lo0/UCXXIwhJpkdbYY8kRZAw=="], - - "mongodb-memory-server-core": ["mongodb-memory-server-core@9.5.0", "", { "dependencies": { "async-mutex": "^0.4.1", "camelcase": "^6.3.0", 
"debug": "^4.3.7", "find-cache-dir": "^3.3.2", "follow-redirects": "^1.15.9", "https-proxy-agent": "^7.0.5", "mongodb": "^5.9.2", "new-find-package-json": "^2.0.0", "semver": "^7.6.3", "tar-stream": "^3.1.7", "tslib": "^2.6.3", "yauzl": "^3.1.3" } }, "sha512-Jb/V80JeYAKWaF4bPFme7SmTR6ew1PWgkpPUepLDfRraeN49i1cruxICeA4zz4T33W/o31N+zazP8wI8ebf7yw=="], - - "moo": ["moo@0.5.2", "", {}, "sha512-iSAJLHYKnX41mKcJKjqvnAN9sf0LMDTXDEvFv+ffuRR9a1MIuXLjMNL6EsnDHSkKLTWNqQQ5uo61P4EbU4NU+Q=="], - - "mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="], - - "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], - - "msgpackr": ["msgpackr@1.11.4", "", { "optionalDependencies": { "msgpackr-extract": "^3.0.2" } }, "sha512-uaff7RG9VIC4jacFW9xzL3jc0iM32DNHe4jYVycBcjUePT/Klnfj7pqtWJt9khvDFizmjN2TlYniYmSS2LIaZg=="], - - "msgpackr-extract": ["msgpackr-extract@3.0.3", "", { "dependencies": { "node-gyp-build-optional-packages": "5.2.2" }, "optionalDependencies": { "@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3" }, "bin": { "download-msgpackr-prebuilds": "bin/download-prebuilds.js" } }, "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA=="], - - "mute-stream": ["mute-stream@2.0.0", "", {}, "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA=="], - - "nan": ["nan@2.22.2", "", {}, "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ=="], - - "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, 
"sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="], - - "natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="], - - "nearley": ["nearley@2.20.1", "", { "dependencies": { "commander": "^2.19.0", "moo": "^0.5.0", "railroad-diagrams": "^1.0.0", "randexp": "0.4.6" }, "bin": { "nearleyc": "bin/nearleyc.js", "nearley-test": "bin/nearley-test.js", "nearley-unparse": "bin/nearley-unparse.js", "nearley-railroad": "bin/nearley-railroad.js" } }, "sha512-+Mc8UaAebFzgV+KpI5n7DasuuQCHA89dmwm7JXw3TV43ukfNQ9DnBH3Mdb2g/I4Fdxc26pwimBWvjIw0UAILSQ=="], - - "negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], - - "new-find-package-json": ["new-find-package-json@2.0.0", "", { "dependencies": { "debug": "^4.3.4" } }, "sha512-lDcBsjBSMlj3LXH2v/FW3txlh2pYTjmbOXPYJD93HI5EwuLzI11tdHSIpUMmfq/IOsldj4Ps8M8flhm+pCK4Ew=="], - - "node-abort-controller": ["node-abort-controller@3.1.1", "", {}, "sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ=="], - - "node-addon-api": ["node-addon-api@6.1.0", "", {}, "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA=="], - - "node-gyp": ["node-gyp@11.2.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "graceful-fs": "^4.2.6", "make-fetch-happen": "^14.0.3", "nopt": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "tar": "^7.4.3", "tinyglobby": "^0.2.12", "which": "^5.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-T0S1zqskVUSxcsSTkAsLc7xCycrRYmtDHadDinzocrThjyQCn5kMlEBSj6H4qDbgsIOSLmmlRIeb0lZXj+UArA=="], - - "node-gyp-build-optional-packages": ["node-gyp-build-optional-packages@5.2.2", "", { "dependencies": { "detect-libc": "^2.0.1" }, "bin": { "node-gyp-build-optional-packages": "bin.js", 
"node-gyp-build-optional-packages-optional": "optional.js", "node-gyp-build-optional-packages-test": "build-test.js" } }, "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw=="], - - "node-releases": ["node-releases@2.0.19", "", {}, "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw=="], - - "nopt": ["nopt@8.1.0", "", { "dependencies": { "abbrev": "^3.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A=="], - - "normalize-path": ["normalize-path@3.0.0", "", {}, "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="], - - "normalize-range": ["normalize-range@0.1.2", "", {}, "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA=="], - - "normalize-url": ["normalize-url@8.0.2", "", {}, "sha512-Ee/R3SyN4BuynXcnTaekmaVdbDAEiNrHqjQIA37mHU8G9pf7aaAD4ZX3XjBLo6rsdcxA/gtkcNYZLt30ACgynw=="], - - "npm-bundled": ["npm-bundled@4.0.0", "", { "dependencies": { "npm-normalize-package-bin": "^4.0.0" } }, "sha512-IxaQZDMsqfQ2Lz37VvyyEtKLe8FsRZuysmedy/N06TU1RyVppYKXrO4xIhR0F+7ubIBox6Q7nir6fQI3ej39iA=="], - - "npm-install-checks": ["npm-install-checks@7.1.1", "", { "dependencies": { "semver": "^7.1.1" } }, "sha512-u6DCwbow5ynAX5BdiHQ9qvexme4U3qHW3MWe5NqH+NeBm0LbiH6zvGjNNew1fY+AZZUtVHbOPF3j7mJxbUzpXg=="], - - "npm-normalize-package-bin": ["npm-normalize-package-bin@4.0.0", "", {}, "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w=="], - - "npm-package-arg": ["npm-package-arg@12.0.2", "", { "dependencies": { "hosted-git-info": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "validate-npm-package-name": "^6.0.0" } }, "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA=="], - - "npm-packlist": ["npm-packlist@10.0.0", "", { "dependencies": { 
"ignore-walk": "^7.0.0" } }, "sha512-rht9U6nS8WOBDc53eipZNPo5qkAV4X2rhKE2Oj1DYUQ3DieXfj0mKkVmjnf3iuNdtMd8WfLdi2L6ASkD/8a+Kg=="], - - "npm-pick-manifest": ["npm-pick-manifest@10.0.0", "", { "dependencies": { "npm-install-checks": "^7.1.0", "npm-normalize-package-bin": "^4.0.0", "npm-package-arg": "^12.0.0", "semver": "^7.3.5" } }, "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ=="], - - "npm-registry-fetch": ["npm-registry-fetch@18.0.2", "", { "dependencies": { "@npmcli/redact": "^3.0.0", "jsonparse": "^1.3.1", "make-fetch-happen": "^14.0.0", "minipass": "^7.0.2", "minipass-fetch": "^4.0.0", "minizlib": "^3.0.1", "npm-package-arg": "^12.0.0", "proc-log": "^5.0.0" } }, "sha512-LeVMZBBVy+oQb5R6FDV9OlJCcWDU+al10oKpe+nsvcHnG24Z3uM3SvJYKfGJlfGjVU8v9liejCrUR/M5HO5NEQ=="], - - "nth-check": ["nth-check@2.1.1", "", { "dependencies": { "boolbase": "^1.0.0" } }, "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w=="], - - "object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="], - - "object-hash": ["object-hash@2.2.0", "", {}, "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw=="], - - "object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="], - - "object-keys": ["object-keys@1.1.1", "", {}, "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA=="], - - "on-exit-leak-free": ["on-exit-leak-free@2.1.2", "", {}, "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA=="], - - "on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="], - - "once": ["once@1.4.0", "", 
{ "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], - - "onetime": ["onetime@7.0.0", "", { "dependencies": { "mimic-function": "^5.0.0" } }, "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="], - - "optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="], - - "ora": ["ora@8.2.0", "", { "dependencies": { "chalk": "^5.3.0", "cli-cursor": "^5.0.0", "cli-spinners": "^2.9.2", "is-interactive": "^2.0.0", "is-unicode-supported": "^2.0.0", "log-symbols": "^6.0.0", "stdin-discarder": "^0.2.2", "string-width": "^7.2.0", "strip-ansi": "^7.1.0" } }, "sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw=="], - - "ordered-binary": ["ordered-binary@1.5.3", "", {}, "sha512-oGFr3T+pYdTGJ+YFEILMpS3es+GiIbs9h/XQrclBXUtd44ey7XwfsMzM31f64I1SQOawDoDr/D823kNCADI8TA=="], - - "os-tmpdir": ["os-tmpdir@1.0.2", "", {}, "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g=="], - - "p-cancelable": ["p-cancelable@4.0.1", "", {}, "sha512-wBowNApzd45EIKdO1LaU+LrMBwAcjfPaYtVzV3lmfM3gf8Z4CHZsiIqlM8TZZ8okYvh5A1cP6gTfCRQtwUpaUg=="], - - "p-limit": ["p-limit@6.2.0", "", { "dependencies": { "yocto-queue": "^1.1.1" } }, "sha512-kuUqqHNUqoIWp/c467RI4X6mmyuojY5jGutNU0wVTmEOOfcuwLqyMVoAi9MKi2Ak+5i9+nhmrK4ufZE8069kHA=="], - - "p-locate": ["p-locate@5.0.0", "", { "dependencies": { "p-limit": "^3.0.2" } }, "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw=="], - - "p-map": ["p-map@7.0.3", "", {}, "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA=="], - - "p-try": 
["p-try@2.2.0", "", {}, "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ=="], - - "package-json-from-dist": ["package-json-from-dist@1.0.1", "", {}, "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw=="], - - "pacote": ["pacote@21.0.0", "", { "dependencies": { "@npmcli/git": "^6.0.0", "@npmcli/installed-package-contents": "^3.0.0", "@npmcli/package-json": "^6.0.0", "@npmcli/promise-spawn": "^8.0.0", "@npmcli/run-script": "^9.0.0", "cacache": "^19.0.0", "fs-minipass": "^3.0.0", "minipass": "^7.0.2", "npm-package-arg": "^12.0.0", "npm-packlist": "^10.0.0", "npm-pick-manifest": "^10.0.0", "npm-registry-fetch": "^18.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "sigstore": "^3.0.0", "ssri": "^12.0.0", "tar": "^6.1.11" }, "bin": { "pacote": "bin/index.js" } }, "sha512-lcqexq73AMv6QNLo7SOpz0JJoaGdS3rBFgF122NZVl1bApo2mfu+XzUBU/X/XsiJu+iUmKpekRayqQYAs+PhkA=="], - - "parent-module": ["parent-module@1.0.1", "", { "dependencies": { "callsites": "^3.0.0" } }, "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="], - - "parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="], - - "parse5-html-rewriting-stream": ["parse5-html-rewriting-stream@7.1.0", "", { "dependencies": { "entities": "^6.0.0", "parse5": "^7.0.0", "parse5-sax-parser": "^7.0.0" } }, "sha512-2ifK6Jb+ONoqOy5f+cYHsqvx1obHQdvIk13Jmt/5ezxP0U9p+fqd+R6O73KblGswyuzBYfetmsfK9ThMgnuPPg=="], - - "parse5-sax-parser": ["parse5-sax-parser@7.0.0", "", { "dependencies": { "parse5": "^7.0.0" } }, "sha512-5A+v2SNsq8T6/mG3ahcz8ZtQ0OUFTatxPbeidoMB7tkJSGDY3tdfl4MHovtLQHkEn5CGxijNWRQHhRQ6IRpXKg=="], - - "parseurl": ["parseurl@1.3.3", "", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="], - - "path-exists": 
["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="], - - "path-is-absolute": ["path-is-absolute@1.0.1", "", {}, "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="], - - "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], - - "path-parse": ["path-parse@1.0.7", "", {}, "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="], - - "path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="], - - "path-type": ["path-type@4.0.0", "", {}, "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw=="], - - "pend": ["pend@1.2.0", "", {}, "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg=="], - - "pg": ["pg@8.16.0", "", { "dependencies": { "pg-connection-string": "^2.9.0", "pg-pool": "^3.10.0", "pg-protocol": "^1.10.0", "pg-types": "2.2.0", "pgpass": "1.0.5" }, "optionalDependencies": { "pg-cloudflare": "^1.2.5" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-7SKfdvP8CTNXjMUzfcVTaI+TDzBEeaUnVwiVGZQD1Hh33Kpev7liQba9uLd4CfN8r9mCVsD0JIpq03+Unpz+kg=="], - - "pg-cloudflare": ["pg-cloudflare@1.2.5", "", {}, "sha512-OOX22Vt0vOSRrdoUPKJ8Wi2OpE/o/h9T8X1s4qSkCedbNah9ei2W2765be8iMVxQUsvgT7zIAT2eIa9fs5+vtg=="], - - "pg-connection-string": ["pg-connection-string@2.9.0", "", {}, "sha512-P2DEBKuvh5RClafLngkAuGe9OUlFV7ebu8w1kmaaOgPcpJd1RIFh7otETfI6hAR8YupOLFTY7nuvvIn7PLciUQ=="], - - "pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="], - - "pg-mem": ["pg-mem@2.9.1", "", 
{ "dependencies": { "functional-red-black-tree": "^1.0.1", "immutable": "^4.3.4", "json-stable-stringify": "^1.0.1", "lru-cache": "^6.0.0", "moment": "^2.27.0", "object-hash": "^2.0.3", "pgsql-ast-parser": "^12.0.1" }, "peerDependencies": { "@mikro-orm/core": ">=4.5.3", "@mikro-orm/postgresql": ">=4.5.3", "knex": ">=0.20", "kysely": ">=0.26", "pg-promise": ">=10.8.7", "slonik": ">=23.0.1", "typeorm": ">=0.2.29" }, "optionalPeers": ["@mikro-orm/core", "@mikro-orm/postgresql", "knex", "kysely", "pg-promise", "slonik", "typeorm"] }, "sha512-OYq8vde7qwvAWGCEtIjkBu6zScGYD8hp3ldDIzVgQa1vtuU8ymWww/4fvcgLuFMmDl0r3NX+ZOCw254+/cLdAA=="], - - "pg-pool": ["pg-pool@3.10.0", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-DzZ26On4sQ0KmqnO34muPcmKbhrjmyiO4lCCR0VwEd7MjmiKf5NTg/6+apUEu0NF7ESa37CGzFxH513CoUmWnA=="], - - "pg-protocol": ["pg-protocol@1.10.0", "", {}, "sha512-IpdytjudNuLv8nhlHs/UrVBhU0e78J0oIS/0AVdTbWxSOkFUVdsHC/NrorO6nXsQNDTT1kzDSOMJubBQviX18Q=="], - - "pg-types": ["pg-types@2.2.0", "", { "dependencies": { "pg-int8": "1.0.1", "postgres-array": "~2.0.0", "postgres-bytea": "~1.0.0", "postgres-date": "~1.0.4", "postgres-interval": "^1.1.0" } }, "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA=="], - - "pgpass": ["pgpass@1.0.5", "", { "dependencies": { "split2": "^4.1.0" } }, "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug=="], - - "pgsql-ast-parser": ["pgsql-ast-parser@12.0.1", "", { "dependencies": { "moo": "^0.5.1", "nearley": "^2.19.5" } }, "sha512-pe8C6Zh5MsS+o38WlSu18NhrTjAv1UNMeDTs2/Km2ZReZdYBYtwtbWGZKK2BM2izv5CrQpbmP0oI10wvHOwv4A=="], - - "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], - - "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], - - "pino": ["pino@9.7.0", "", { 
"dependencies": { "atomic-sleep": "^1.0.0", "fast-redact": "^3.1.1", "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pino-std-serializers": "^7.0.0", "process-warning": "^5.0.0", "quick-format-unescaped": "^4.0.3", "real-require": "^0.2.0", "safe-stable-stringify": "^2.3.1", "sonic-boom": "^4.0.1", "thread-stream": "^3.0.0" }, "bin": { "pino": "bin.js" } }, "sha512-vnMCM6xZTb1WDmLvtG2lE/2p+t9hDEIvTWJsu6FejkE62vB7gDhvzrpFR4Cw2to+9JNQxVnkAKVPA1KPB98vWg=="], - - "pino-abstract-transport": ["pino-abstract-transport@2.0.0", "", { "dependencies": { "split2": "^4.0.0" } }, "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw=="], - - "pino-loki": ["pino-loki@2.6.0", "", { "dependencies": { "pino-abstract-transport": "^2.0.0", "pump": "^3.0.2" }, "bin": { "pino-loki": "dist/cli.js" } }, "sha512-Qy+NeIdb0YmZe/M5mgnO5aGaAyVaeqgwn45T6VajhRXZlZVfGe1YNYhFa9UZyCeNFAPGaUkD2e9yPGjx+2BBYA=="], - - "pino-pretty": ["pino-pretty@13.0.0", "", { "dependencies": { "colorette": "^2.0.7", "dateformat": "^4.6.3", "fast-copy": "^3.0.2", "fast-safe-stringify": "^2.1.1", "help-me": "^5.0.0", "joycon": "^3.1.1", "minimist": "^1.2.6", "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pump": "^3.0.0", "secure-json-parse": "^2.4.0", "sonic-boom": "^4.0.1", "strip-json-comments": "^3.1.1" }, "bin": { "pino-pretty": "bin.js" } }, "sha512-cQBBIVG3YajgoUjo1FdKVRX6t9XPxwB9lcNJVD5GCnNM4Y6T12YYx8c6zEejxQsU0wrg9TwmDulcE9LR7qcJqA=="], - - "pino-std-serializers": ["pino-std-serializers@7.0.0", "", {}, "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA=="], - - "piscina": ["piscina@5.0.0", "", { "optionalDependencies": { "@napi-rs/nice": "^1.0.1" } }, "sha512-R+arufwL7sZvGjAhSMK3TfH55YdGOqhpKXkcwQJr432AAnJX/xxX19PA4QisrmJ+BTTfZVggaz6HexbkQq1l1Q=="], - - "pkg-dir": ["pkg-dir@4.2.0", "", { "dependencies": { "find-up": "^4.0.0" } }, 
"sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ=="], - - "postcss": ["postcss@8.5.4", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-QSa9EBe+uwlGTFmHsPKokv3B/oEMQZxfqW0QqNCyhpa6mB1afzulwn8hihglqAb2pOw+BJgNlmXQ8la2VeHB7w=="], - - "postcss-media-query-parser": ["postcss-media-query-parser@0.2.3", "", {}, "sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig=="], - - "postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="], - - "postgres-array": ["postgres-array@2.0.0", "", {}, "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA=="], - - "postgres-bytea": ["postgres-bytea@1.0.0", "", {}, "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w=="], - - "postgres-date": ["postgres-date@1.0.7", "", {}, "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="], - - "postgres-interval": ["postgres-interval@1.2.0", "", { "dependencies": { "xtend": "^4.0.0" } }, "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ=="], - - "prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="], - - "proc-log": ["proc-log@5.0.0", "", {}, "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ=="], - - "process": ["process@0.11.10", "", {}, "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="], - - "process-nextick-args": ["process-nextick-args@2.0.1", "", {}, "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="], - - "process-warning": ["process-warning@5.0.0", "", {}, 
"sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA=="], - - "promise-retry": ["promise-retry@2.0.1", "", { "dependencies": { "err-code": "^2.0.2", "retry": "^0.12.0" } }, "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g=="], - - "proper-lockfile": ["proper-lockfile@4.1.2", "", { "dependencies": { "graceful-fs": "^4.2.4", "retry": "^0.12.0", "signal-exit": "^3.0.2" } }, "sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA=="], - - "properties-reader": ["properties-reader@2.3.0", "", { "dependencies": { "mkdirp": "^1.0.4" } }, "sha512-z597WicA7nDZxK12kZqHr2TcvwNU1GCfA5UwfDY/HDp3hXPoPlb5rlEx9bwGTiJnc0OqbBTkU975jDToth8Gxw=="], - - "property-expr": ["property-expr@2.0.6", "", {}, "sha512-SVtmxhRE/CGkn3eZY1T6pC8Nln6Fr/lu1mKSgRud0eC73whjGfoAogbn78LkD8aFL0zz3bAFerKSnOl7NlErBA=="], - - "protobufjs": ["protobufjs@7.5.3", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": "^1.1.2", "@protobufjs/codegen": "^2.0.4", "@protobufjs/eventemitter": "^1.1.0", "@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.0", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw=="], - - "proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="], - - "pump": ["pump@3.0.2", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw=="], - - "punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], - - "qjobs": ["qjobs@1.2.0", "", {}, 
"sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg=="], - - "qs": ["qs@6.13.0", "", { "dependencies": { "side-channel": "^1.0.6" } }, "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg=="], - - "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], - - "quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="], - - "quick-lru": ["quick-lru@5.1.1", "", {}, "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA=="], - - "railroad-diagrams": ["railroad-diagrams@1.0.0", "", {}, "sha512-cz93DjNeLY0idrCNOH6PviZGRN9GJhsdm9hpn1YCS879fj4W+x5IFJhhkRZcwVgMmFF7R82UA/7Oh+R8lLZg6A=="], - - "randexp": ["randexp@0.4.6", "", { "dependencies": { "discontinuous-range": "1.0.0", "ret": "~0.1.10" } }, "sha512-80WNmd9DA0tmZrw9qQa62GPPWfuXJknrmVmLcxvq4uZBdYqb1wYoKTmnlGUchvVWe0XiLupYkBoXVOxz3C8DYQ=="], - - "range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="], - - "raw-body": ["raw-body@2.5.2", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA=="], - - "readable-stream": ["readable-stream@4.7.0", "", { "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", "events": "^3.3.0", "process": "^0.11.10", "string_decoder": "^1.3.0" } }, "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg=="], - - "readdir-glob": ["readdir-glob@1.1.3", "", { "dependencies": { "minimatch": "^5.1.0" } }, 
"sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA=="], - - "readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="], - - "real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="], - - "redis-errors": ["redis-errors@1.2.0", "", {}, "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w=="], - - "redis-parser": ["redis-parser@3.0.0", "", { "dependencies": { "redis-errors": "^1.0.0" } }, "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A=="], - - "reflect-metadata": ["reflect-metadata@0.2.2", "", {}, "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q=="], - - "require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="], - - "require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="], - - "requires-port": ["requires-port@1.0.0", "", {}, "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="], - - "resolve": ["resolve@1.22.10", "", { "dependencies": { "is-core-module": "^2.16.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w=="], - - "resolve-alpn": ["resolve-alpn@1.2.1", "", {}, "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g=="], - - "resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="], - - "responselike": 
["responselike@3.0.0", "", { "dependencies": { "lowercase-keys": "^3.0.0" } }, "sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg=="], - - "restore-cursor": ["restore-cursor@5.1.0", "", { "dependencies": { "onetime": "^7.0.0", "signal-exit": "^4.1.0" } }, "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA=="], - - "ret": ["ret@0.1.15", "", {}, "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg=="], - - "retry": ["retry@0.12.0", "", {}, "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow=="], - - "reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="], - - "rfdc": ["rfdc@1.4.1", "", {}, "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA=="], - - "rimraf": ["rimraf@3.0.2", "", { "dependencies": { "glob": "^7.1.3" }, "bin": { "rimraf": "bin.js" } }, "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA=="], - - "rollup": ["rollup@4.40.2", "", { "dependencies": { "@types/estree": "1.0.7" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.40.2", "@rollup/rollup-android-arm64": "4.40.2", "@rollup/rollup-darwin-arm64": "4.40.2", "@rollup/rollup-darwin-x64": "4.40.2", "@rollup/rollup-freebsd-arm64": "4.40.2", "@rollup/rollup-freebsd-x64": "4.40.2", "@rollup/rollup-linux-arm-gnueabihf": "4.40.2", "@rollup/rollup-linux-arm-musleabihf": "4.40.2", "@rollup/rollup-linux-arm64-gnu": "4.40.2", "@rollup/rollup-linux-arm64-musl": "4.40.2", "@rollup/rollup-linux-loongarch64-gnu": "4.40.2", "@rollup/rollup-linux-powerpc64le-gnu": "4.40.2", "@rollup/rollup-linux-riscv64-gnu": "4.40.2", "@rollup/rollup-linux-riscv64-musl": "4.40.2", "@rollup/rollup-linux-s390x-gnu": "4.40.2", "@rollup/rollup-linux-x64-gnu": "4.40.2", 
"@rollup/rollup-linux-x64-musl": "4.40.2", "@rollup/rollup-win32-arm64-msvc": "4.40.2", "@rollup/rollup-win32-ia32-msvc": "4.40.2", "@rollup/rollup-win32-x64-msvc": "4.40.2", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-tfUOg6DTP4rhQ3VjOO6B4wyrJnGOX85requAXvqYTHsOgb2TFJdZ3aWpT8W2kPoypSGP7dZUyzxJ9ee4buM5Fg=="], - - "run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="], - - "rxjs": ["rxjs@7.8.2", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA=="], - - "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], - - "safe-regex-test": ["safe-regex-test@1.1.0", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "is-regex": "^1.2.1" } }, "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw=="], - - "safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="], - - "safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="], - - "sass": ["sass@1.88.0", "", { "dependencies": { "chokidar": "^4.0.0", "immutable": "^5.0.2", "source-map-js": ">=0.6.2 <2.0.0" }, "optionalDependencies": { "@parcel/watcher": "^2.4.1" }, "bin": { "sass": "sass.js" } }, "sha512-sF6TWQqjFvr4JILXzG4ucGOLELkESHL+I5QJhh7CNaE+Yge0SI+ehCatsXhJ7ymU1hAFcIS3/PBpjdIbXoyVbg=="], - - "secure-json-parse": ["secure-json-parse@2.7.0", "", {}, "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw=="], - - "semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, 
"sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], - - "set-function-length": ["set-function-length@1.2.2", "", { "dependencies": { "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", "has-property-descriptors": "^1.0.2" } }, "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg=="], - - "setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="], - - "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="], - - "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], - - "side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="], - - "side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="], - - "side-channel-map": ["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="], - - "side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", 
"side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="], - - "signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], - - "sigstore": ["sigstore@3.1.0", "", { "dependencies": { "@sigstore/bundle": "^3.1.0", "@sigstore/core": "^2.0.0", "@sigstore/protobuf-specs": "^0.4.0", "@sigstore/sign": "^3.1.0", "@sigstore/tuf": "^3.1.0", "@sigstore/verify": "^2.1.0" } }, "sha512-ZpzWAFHIFqyFE56dXqgX/DkDRZdz+rRcjoIk/RQU4IX0wiCv1l8S7ZrXDHcCc+uaf+6o7w3h2l3g6GYG5TKN9Q=="], - - "slash": ["slash@3.0.0", "", {}, "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q=="], - - "slice-ansi": ["slice-ansi@5.0.0", "", { "dependencies": { "ansi-styles": "^6.0.0", "is-fullwidth-code-point": "^4.0.0" } }, "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ=="], - - "smart-buffer": ["smart-buffer@4.2.0", "", {}, "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg=="], - - "socket.io": ["socket.io@4.8.1", "", { "dependencies": { "accepts": "~1.3.4", "base64id": "~2.0.0", "cors": "~2.8.5", "debug": "~4.3.2", "engine.io": "~6.6.0", "socket.io-adapter": "~2.5.2", "socket.io-parser": "~4.2.4" } }, "sha512-oZ7iUCxph8WYRHHcjBEc9unw3adt5CmSNlppj/5Q4k2RIrhl8Z5yY2Xr4j9zj0+wzVZ0bxmYoGSzKJnRl6A4yg=="], - - "socket.io-adapter": ["socket.io-adapter@2.5.5", "", { "dependencies": { "debug": "~4.3.4", "ws": "~8.17.1" } }, "sha512-eLDQas5dzPgOWCk9GuuJC2lBqItuhKI4uxGgo9aIV7MYbk2h9Q6uULEh8WBzThoI7l+qU9Ast9fVUmkqPP9wYg=="], - - "socket.io-parser": ["socket.io-parser@4.2.4", "", { "dependencies": { "@socket.io/component-emitter": "~3.1.0", "debug": "~4.3.1" } }, "sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew=="], - - "socks": ["socks@2.8.4", "", { "dependencies": { "ip-address": 
"^9.0.5", "smart-buffer": "^4.2.0" } }, "sha512-D3YaD0aRxR3mEcqnidIs7ReYJFVzWdd6fXJYUM8ixcQcJRGTka/b3saV0KflYhyVJXKhb947GndU35SxYNResQ=="], - - "socks-proxy-agent": ["socks-proxy-agent@8.0.5", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "^4.3.4", "socks": "^2.8.3" } }, "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw=="], - - "sonic-boom": ["sonic-boom@4.2.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww=="], - - "source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="], - - "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="], - - "source-map-support": ["source-map-support@0.5.21", "", { "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w=="], - - "sparse-bitfield": ["sparse-bitfield@3.0.3", "", { "dependencies": { "memory-pager": "^1.0.2" } }, "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ=="], - - "spdx-correct": ["spdx-correct@3.2.0", "", { "dependencies": { "spdx-expression-parse": "^3.0.0", "spdx-license-ids": "^3.0.0" } }, "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA=="], - - "spdx-exceptions": ["spdx-exceptions@2.5.0", "", {}, "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w=="], - - "spdx-expression-parse": ["spdx-expression-parse@3.0.1", "", { "dependencies": { "spdx-exceptions": "^2.1.0", "spdx-license-ids": "^3.0.0" } }, "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q=="], - - "spdx-license-ids": 
["spdx-license-ids@3.0.21", "", {}, "sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg=="], - - "split-ca": ["split-ca@1.0.1", "", {}, "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ=="], - - "split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="], - - "sprintf-js": ["sprintf-js@1.1.3", "", {}, "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA=="], - - "ssh-remote-port-forward": ["ssh-remote-port-forward@1.0.4", "", { "dependencies": { "@types/ssh2": "^0.5.48", "ssh2": "^1.4.0" } }, "sha512-x0LV1eVDwjf1gmG7TTnfqIzf+3VPRz7vrNIjX6oYLbeCrf/PeVY6hkT68Mg+q02qXxQhrLjB0jfgvhevoCRmLQ=="], - - "ssh2": ["ssh2@1.16.0", "", { "dependencies": { "asn1": "^0.2.6", "bcrypt-pbkdf": "^1.0.2" }, "optionalDependencies": { "cpu-features": "~0.0.10", "nan": "^2.20.0" } }, "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg=="], - - "ssri": ["ssri@12.0.0", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ=="], - - "standard-as-callback": ["standard-as-callback@2.1.0", "", {}, "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A=="], - - "statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="], - - "stdin-discarder": ["stdin-discarder@0.2.2", "", {}, "sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ=="], - - "streamroller": ["streamroller@3.1.5", "", { "dependencies": { "date-format": "^4.0.14", "debug": "^4.3.4", "fs-extra": "^8.1.0" } }, "sha512-KFxaM7XT+irxvdqSP1LGLgNWbYN7ay5owZ3r/8t77p+EtSUAfUgtl7be3xtqtOmGUl9K9YPO2ca8133RlTjvKw=="], - - "streamx": ["streamx@2.22.1", "", { 
"dependencies": { "fast-fifo": "^1.3.2", "text-decoder": "^1.1.0" }, "optionalDependencies": { "bare-events": "^2.2.0" } }, "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA=="], - - "string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], - - "string-width-cjs": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], - - "string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="], - - "strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], - - "strip-ansi-cjs": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], - - "strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="], - - "superagent": ["superagent@8.1.2", "", { "dependencies": { "component-emitter": "^1.3.0", "cookiejar": "^2.1.4", "debug": "^4.3.4", "fast-safe-stringify": "^2.1.1", "form-data": "^4.0.0", "formidable": "^2.1.2", "methods": "^1.1.2", "mime": "2.6.0", "qs": "^6.11.0", "semver": "^7.3.8" } }, "sha512-6WTxW1EB6yCxV5VFOIPQruWGHqc3yI7hEmZK6h+pyk69Lk/Ut7rLUY6W/ONF2MjBuGjvmMiIpsrVJ2vjrHlslA=="], - - "supertest": ["supertest@6.3.4", "", { "dependencies": { "methods": "^1.1.2", "superagent": "^8.1.2" } }, 
"sha512-erY3HFDG0dPnhw4U+udPfrzXa4xhSG+n4rxfRuZWCUvjFWwKl+OxWf/7zk50s84/fAAs7vf5QAb9uRa0cCykxw=="], - - "supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], - - "supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="], - - "tailwindcss": ["tailwindcss@4.1.8", "", {}, "sha512-kjeW8gjdxasbmFKpVGrGd5T4i40mV5J2Rasw48QARfYeQ8YS9x02ON9SFWax3Qf616rt4Cp3nVNIj6Hd1mP3og=="], - - "tapable": ["tapable@2.2.2", "", {}, "sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg=="], - - "tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="], - - "tar-fs": ["tar-fs@3.0.9", "", { "dependencies": { "pump": "^3.0.0", "tar-stream": "^3.1.5" }, "optionalDependencies": { "bare-fs": "^4.0.1", "bare-path": "^3.0.0" } }, "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA=="], - - "tar-stream": ["tar-stream@3.1.7", "", { "dependencies": { "b4a": "^1.6.4", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ=="], - - "testcontainers": ["testcontainers@10.28.0", "", { "dependencies": { "@balena/dockerignore": "^1.0.2", "@types/dockerode": "^3.3.35", "archiver": "^7.0.1", "async-lock": "^1.4.1", "byline": "^5.0.0", "debug": "^4.3.5", "docker-compose": "^0.24.8", "dockerode": "^4.0.5", "get-port": "^7.1.0", "proper-lockfile": "^4.1.2", "properties-reader": "^2.3.0", "ssh-remote-port-forward": "^1.0.4", "tar-fs": "^3.0.7", "tmp": "^0.2.3", 
"undici": "^5.29.0" } }, "sha512-1fKrRRCsgAQNkarjHCMKzBKXSJFmzNTiTbhb5E/j5hflRXChEtHvkefjaHlgkNUjfw92/Dq8LTgwQn6RDBFbMg=="], - - "text-decoder": ["text-decoder@1.2.3", "", { "dependencies": { "b4a": "^1.6.4" } }, "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA=="], - - "text-table": ["text-table@0.2.0", "", {}, "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw=="], - - "thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="], - - "tiny-case": ["tiny-case@1.0.3", "", {}, "sha512-Eet/eeMhkO6TX8mnUteS9zgPbUMQa4I6Kkp5ORiBD5476/m+PIRiumP5tmh5ioJpH7k51Kehawy2UDfsnxxY8Q=="], - - "tinyglobby": ["tinyglobby@0.2.13", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw=="], - - "tmp": ["tmp@0.2.3", "", {}, "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w=="], - - "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], - - "toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="], - - "toposort": ["toposort@2.0.2", "", {}, "sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg=="], - - "tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="], - - "trading-dashboard": ["trading-dashboard@workspace:apps/dashboard"], - - "ts-api-utils": ["ts-api-utils@1.4.3", "", { "peerDependencies": { "typescript": ">=4.2.0" } }, 
"sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw=="], - - "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], - - "tuf-js": ["tuf-js@3.0.1", "", { "dependencies": { "@tufjs/models": "3.0.1", "debug": "^4.3.6", "make-fetch-happen": "^14.0.1" } }, "sha512-+68OP1ZzSF84rTckf3FA95vJ1Zlx/uaXyiiKyPd1pA4rZNkpEvDAKmsu1xUSmbF/chCRYgZ6UZkDwC7PmzmAyA=="], - - "turbo": ["turbo@2.5.4", "", { "optionalDependencies": { "turbo-darwin-64": "2.5.4", "turbo-darwin-arm64": "2.5.4", "turbo-linux-64": "2.5.4", "turbo-linux-arm64": "2.5.4", "turbo-windows-64": "2.5.4", "turbo-windows-arm64": "2.5.4" }, "bin": { "turbo": "bin/turbo" } }, "sha512-kc8ZibdRcuWUG1pbYSBFWqmIjynlD8Lp7IB6U3vIzvOv9VG+6Sp8bzyeBWE3Oi8XV5KsQrznyRTBPvrf99E4mA=="], - - "turbo-darwin-64": ["turbo-darwin-64@2.5.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-ah6YnH2dErojhFooxEzmvsoZQTMImaruZhFPfMKPBq8sb+hALRdvBNLqfc8NWlZq576FkfRZ/MSi4SHvVFT9PQ=="], - - "turbo-darwin-arm64": ["turbo-darwin-arm64@2.5.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-2+Nx6LAyuXw2MdXb7pxqle3MYignLvS7OwtsP9SgtSBaMlnNlxl9BovzqdYAgkUW3AsYiQMJ/wBRb7d+xemM5A=="], - - "turbo-linux-64": ["turbo-linux-64@2.5.4", "", { "os": "linux", "cpu": "x64" }, "sha512-5May2kjWbc8w4XxswGAl74GZ5eM4Gr6IiroqdLhXeXyfvWEdm2mFYCSWOzz0/z5cAgqyGidF1jt1qzUR8hTmOA=="], - - "turbo-linux-arm64": ["turbo-linux-arm64@2.5.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-/2yqFaS3TbfxV3P5yG2JUI79P7OUQKOUvAnx4MV9Bdz6jqHsHwc9WZPpO4QseQm+NvmgY6ICORnoVPODxGUiJg=="], - - "turbo-windows-64": ["turbo-windows-64@2.5.4", "", { "os": "win32", "cpu": "x64" }, "sha512-EQUO4SmaCDhO6zYohxIjJpOKRN3wlfU7jMAj3CgcyTPvQR/UFLEKAYHqJOnJtymbQmiiM/ihX6c6W6Uq0yC7mA=="], - - "turbo-windows-arm64": ["turbo-windows-arm64@2.5.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-oQ8RrK1VS8lrxkLriotFq+PiF7iiGgkZtfLKF4DDKsmdbPo0O9R2mQxm7jHLuXraRCuIQDWMIw6dpcr7Iykf4A=="], - - 
"tweetnacl": ["tweetnacl@0.14.5", "", {}, "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="], - - "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], - - "type-fest": ["type-fest@2.19.0", "", {}, "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA=="], - - "type-is": ["type-is@1.6.18", "", { "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" } }, "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g=="], - - "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="], - - "ua-parser-js": ["ua-parser-js@0.7.40", "", { "bin": { "ua-parser-js": "script/cli.js" } }, "sha512-us1E3K+3jJppDBa3Tl0L3MOJiGhe1C6P0+nIvQAFYbxlMAx0h81eOwLmU57xgqToduDDPx3y5QsdjPfDu+FgOQ=="], - - "undici": ["undici@5.29.0", "", { "dependencies": { "@fastify/busboy": "^2.0.0" } }, "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg=="], - - "undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="], - - "unique-filename": ["unique-filename@4.0.0", "", { "dependencies": { "unique-slug": "^5.0.0" } }, "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ=="], - - "unique-slug": ["unique-slug@5.0.0", "", { "dependencies": { "imurmurhash": "^0.1.4" } }, "sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg=="], - - "universalify": ["universalify@0.1.2", "", {}, "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="], - - "unpipe": 
["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="], - - "update-browserslist-db": ["update-browserslist-db@1.1.3", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw=="], - - "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], - - "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="], - - "utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="], - - "uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="], - - "validate-npm-package-license": ["validate-npm-package-license@3.0.4", "", { "dependencies": { "spdx-correct": "^3.0.0", "spdx-expression-parse": "^3.0.0" } }, "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew=="], - - "validate-npm-package-name": ["validate-npm-package-name@6.0.1", "", {}, "sha512-OaI//3H0J7ZkR1OqlhGA8cA+Cbk/2xFOQpJOt5+s27/ta9eZwpeervh4Mxh4w0im/kdgktowaqVNR7QOrUd7Yg=="], - - "vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="], - - "vite": ["vite@6.3.5", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", 
"jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ=="], - - "void-elements": ["void-elements@2.0.1", "", {}, "sha512-qZKX4RnBzH2ugr8Lxa7x+0V6XD9Sb/ouARtiasEQCHB1EVU4NXtmHsDDrx1dO4ne5fc3J6EW05BP1Dl0z0iung=="], - - "watchpack": ["watchpack@2.4.2", "", { "dependencies": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" } }, "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw=="], - - "weak-lru-cache": ["weak-lru-cache@1.2.2", "", {}, "sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw=="], - - "webidl-conversions": ["webidl-conversions@7.0.0", "", {}, "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="], - - "whatwg-url": ["whatwg-url@14.2.0", "", { "dependencies": { "tr46": "^5.1.0", "webidl-conversions": "^7.0.0" } }, "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw=="], - - "which": ["which@1.3.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "which": "./bin/which" } }, "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ=="], - - "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], - - "wrap-ansi": ["wrap-ansi@9.0.0", "", { "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", "strip-ansi": "^7.1.0" } }, "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q=="], - - "wrap-ansi-cjs": 
["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], - - "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="], - - "ws": ["ws@8.18.2", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ=="], - - "xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="], - - "y18n": ["y18n@5.0.8", "", {}, "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="], - - "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], - - "yaml": ["yaml@2.8.0", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ=="], - - "yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="], - - "yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="], - - "yauzl": ["yauzl@3.2.0", "", { "dependencies": { "buffer-crc32": "~0.2.3", "pend": "~1.2.0" } }, "sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w=="], - - "yocto-queue": ["yocto-queue@1.2.1", "", {}, 
"sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg=="], - - "yoctocolors-cjs": ["yoctocolors-cjs@2.1.2", "", {}, "sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA=="], - - "yup": ["yup@1.6.1", "", { "dependencies": { "property-expr": "^2.0.5", "tiny-case": "^1.0.3", "toposort": "^2.0.2", "type-fest": "^2.19.0" } }, "sha512-JED8pB50qbA4FOkDol0bYF/p60qSEDQqBD0/qeIrUCG1KbPBIQ776fCUNb9ldbPcSTxA69g/47XTo4TqWiuXOA=="], - - "zip-stream": ["zip-stream@6.0.1", "", { "dependencies": { "archiver-utils": "^5.0.0", "compress-commons": "^6.0.2", "readable-stream": "^4.0.0" } }, "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA=="], - - "zone.js": ["zone.js@0.15.1", "", {}, "sha512-XE96n56IQpJM7NAoXswY3XRLcWFW83xe0BiAOeMD7K5k5xecOeul3Qcpx6GqEeeHNkW5DWL5zOyTbEfB4eti8w=="], - - "@angular-devkit/core/ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="], - - "@angular-devkit/core/source-map": ["source-map@0.7.4", "", {}, "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA=="], - - "@angular/compiler-cli/@babel/core": ["@babel/core@7.27.4", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.3", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.4", "@babel/parser": "^7.27.4", "@babel/template": "^7.27.2", "@babel/traverse": "^7.27.4", "@babel/types": "^7.27.3", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, 
"sha512-bXYxrXFubeYdvB0NhD/NBB3Qi6aZeV20GOWVI47t2dkecCEoneR4NPVcb7abpXDEvejgrUfFtG6vG/zxAKmg+g=="], - - "@angular/compiler-cli/yargs": ["yargs@18.0.0", "", { "dependencies": { "cliui": "^9.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "string-width": "^7.2.0", "y18n": "^5.0.5", "yargs-parser": "^22.0.0" } }, "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg=="], - - "@babel/core/convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="], - - "@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - - "@babel/helper-compilation-targets/lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="], - - "@babel/helper-compilation-targets/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - - "@babel/traverse/globals": ["globals@11.12.0", "", {}, "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA=="], - - "@inquirer/core/signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="], - - "@inquirer/core/wrap-ansi": ["wrap-ansi@6.2.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA=="], - - "@isaacs/cliui/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, 
"sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="], - - "@isaacs/cliui/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], - - "@isaacs/cliui/wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], - - "@listr2/prompt-adapter-inquirer/@inquirer/type": ["@inquirer/type@1.5.5", "", { "dependencies": { "mute-stream": "^1.0.0" } }, "sha512-MzICLu4yS7V8AA61sANROZ9vT1H3ooca5dSmI1FjZkzq7o/koMsRfQSzRtFo+F3Ao4Sf1C0bpLKejpKB/+j6MA=="], - - "@npmcli/agent/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - - "@npmcli/git/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - - "@npmcli/git/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], - - "@npmcli/package-json/glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="], - - "@npmcli/promise-spawn/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], - - "@npmcli/run-script/which": 
["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], - - "@parcel/watcher/detect-libc": ["detect-libc@1.0.3", "", { "bin": { "detect-libc": "./bin/detect-libc.js" } }, "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg=="], - - "@parcel/watcher/node-addon-api": ["node-addon-api@7.1.1", "", {}, "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ=="], - - "@stock-bot/cache/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/config/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/data-frame/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/event-bus/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/http/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/logger/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/mongodb-client/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": 
"~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/postgres-client/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/questdb-client/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/shutdown/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/strategy-engine/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/strategy-service/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/strategy-service/commander": ["commander@11.1.0", "", {}, "sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ=="], - - "@stock-bot/types/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/utils/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@stock-bot/vector-engine/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, 
"sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], - - "@tailwindcss/oxide/tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="], - - "@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.4.3", "", { "dependencies": { "@emnapi/wasi-threads": "1.0.2", "tslib": "^2.4.0" }, "bundled": true }, "sha512-4m62DuCE07lw01soJwPiBGC0nAww0Q+RY70VZ+n49yDIO13yyinhbWCeNnaob0lakDtWQzSdtNWzJeOJt2ma+g=="], - - "@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.4.3", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ=="], - - "@tailwindcss/oxide-wasm32-wasi/@emnapi/wasi-threads": ["@emnapi/wasi-threads@1.0.2", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-5n3nTJblwRi8LlXkJ9eBzu+kZR8Yxcc7ubakyQTFzPMtIhFpUBRbsnc2Dv88IZDIbCDlBiWrknhB4Lsz7mg6BA=="], - - "@tailwindcss/oxide-wasm32-wasi/@napi-rs/wasm-runtime": ["@napi-rs/wasm-runtime@0.2.11", "", { "dependencies": { "@emnapi/core": "^1.4.3", "@emnapi/runtime": "^1.4.3", "@tybys/wasm-util": "^0.9.0" }, "bundled": true }, "sha512-9DPkXtvHydrcOsopiYpUgPHpmj0HWZKMUnL2dZqpvC42lsratuBG06V5ipyno0fUek5VlFsNQ+AcFATSrJXgMA=="], - - "@tailwindcss/oxide-wasm32-wasi/@tybys/wasm-util": ["@tybys/wasm-util@0.9.0", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw=="], - - "@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], - - "@tufjs/models/minimatch": ["minimatch@9.0.5", "", { 
"dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.3", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg=="], - - "ajv-formats/ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="], - - "ansi-escapes/type-fest": ["type-fest@0.21.3", "", {}, "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w=="], - - "anymatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], - - "archiver-utils/glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="], - - "archiver-utils/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="], - - "bl/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="], - - "bl/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], - - "body-parser/debug": 
["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], - - "cacache/glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="], - - "cacache/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - - "cacache/tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="], - - "cli-truncate/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], - - "cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], - - "compress-commons/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="], - - "connect/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], - - "cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], - - "decompress-response/mimic-response": ["mimic-response@3.1.0", "", {}, "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="], - - "docker-modem/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], - - "dockerode/tar-fs": ["tar-fs@2.1.3", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg=="], - - "dockerode/uuid": ["uuid@10.0.0", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="], - - "dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="], - - "encoding/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], - - "engine.io/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], - - "engine.io/ws": ["ws@8.17.1", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ=="], - - "ent/punycode": ["punycode@1.4.1", "", {}, "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ=="], - - "external-editor/tmp": ["tmp@0.0.33", "", 
{ "dependencies": { "os-tmpdir": "~1.0.2" } }, "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw=="], - - "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], - - "finalhandler/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], - - "finalhandler/on-finished": ["on-finished@2.3.0", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww=="], - - "finalhandler/statuses": ["statuses@1.5.0", "", {}, "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA=="], - - "find-cache-dir/make-dir": ["make-dir@3.1.0", "", { "dependencies": { "semver": "^6.0.0" } }, "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw=="], - - "foreground-child/signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="], - - "globals/type-fest": ["type-fest@0.20.2", "", {}, "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ=="], - - "got/type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], - - "hosted-git-info/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - - "http-proxy/eventemitter3": ["eventemitter3@4.0.7", "", {}, "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="], - - "ignore-walk/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, 
"sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "istanbul-lib-instrument/@babel/core": ["@babel/core@7.27.4", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.3", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.4", "@babel/parser": "^7.27.4", "@babel/template": "^7.27.2", "@babel/traverse": "^7.27.4", "@babel/types": "^7.27.3", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-bXYxrXFubeYdvB0NhD/NBB3Qi6aZeV20GOWVI47t2dkecCEoneR4NPVcb7abpXDEvejgrUfFtG6vG/zxAKmg+g=="], - - "karma/chokidar": ["chokidar@3.6.0", "", { "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="], - - "karma/yargs": ["yargs@16.2.0", "", { "dependencies": { "cliui": "^7.0.2", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.0", "y18n": "^5.0.5", "yargs-parser": "^20.2.2" } }, "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw=="], - - "karma-coverage/istanbul-lib-instrument": ["istanbul-lib-instrument@5.2.1", "", { "dependencies": { "@babel/core": "^7.12.3", "@babel/parser": "^7.14.7", "@istanbuljs/schema": "^0.1.2", "istanbul-lib-coverage": "^3.2.0", "semver": "^6.3.0" } }, "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg=="], - - "karma-jasmine/jasmine-core": ["jasmine-core@4.6.1", "", {}, "sha512-VYz/BjjmC3klLJlLwA4Kw8ytk0zDSmbbDLNs794VnWmkcCB7I9aAL/D48VNQtmITyPvea2C3jdUMfc3kAoy0PQ=="], - - 
"lazystream/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="], - - "log-symbols/chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="], - - "log-symbols/is-unicode-supported": ["is-unicode-supported@1.3.0", "", {}, "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ=="], - - "log-update/ansi-escapes": ["ansi-escapes@7.0.0", "", { "dependencies": { "environment": "^1.0.0" } }, "sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw=="], - - "log-update/slice-ansi": ["slice-ansi@7.1.0", "", { "dependencies": { "ansi-styles": "^6.2.1", "is-fullwidth-code-point": "^5.0.0" } }, "sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg=="], - - "log-update/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], - - "make-fetch-happen/negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="], - - "micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], - - "minipass-flush/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - - "minipass-pipeline/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, 
"sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - - "minipass-sized/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - - "mongodb-memory-server-core/mongodb": ["mongodb@5.9.2", "", { "dependencies": { "bson": "^5.5.0", "mongodb-connection-string-url": "^2.6.0", "socks": "^2.7.1" }, "optionalDependencies": { "@mongodb-js/saslprep": "^1.1.0" }, "peerDependencies": { "@aws-sdk/credential-providers": "^3.188.0", "@mongodb-js/zstd": "^1.0.0", "kerberos": "^1.0.0 || ^2.0.0", "mongodb-client-encryption": ">=2.3.0 <3", "snappy": "^7.2.2" }, "optionalPeers": ["@aws-sdk/credential-providers", "@mongodb-js/zstd", "kerberos", "mongodb-client-encryption", "snappy"] }, "sha512-H60HecKO4Bc+7dhOv4sJlgvenK4fQNqqUIlXxZYQNbfEWSALGAwGoyJd/0Qwk4TttFXUOHJ2ZJQe/52ScaUwtQ=="], - - "nearley/commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="], - - "node-gyp/tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="], - - "node-gyp/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], - - "ora/chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="], - - "ora/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, 
"sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], - - "ora/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], - - "p-locate/p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="], - - "path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], - - "pkg-dir/find-up": ["find-up@4.1.0", "", { "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" } }, "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw=="], - - "properties-reader/mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="], - - "readdir-glob/minimatch": ["minimatch@5.1.6", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g=="], - - "restore-cursor/signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="], - - "sass/immutable": ["immutable@5.1.2", "", {}, "sha512-qHKXW1q6liAk1Oys6umoaZbDRqjcjgSrbnrifHsfsttza7zcvRAsL7mMV6xWcyhwQy7Xj5v4hhbr6b+iDYwlmQ=="], - - "slice-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], - - "slice-ansi/is-fullwidth-code-point": ["is-fullwidth-code-point@4.0.0", "", {}, "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ=="], - - "socket.io/debug": ["debug@4.3.7", "", { "dependencies": { "ms": 
"^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], - - "socket.io-adapter/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], - - "socket.io-adapter/ws": ["ws@8.17.1", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ=="], - - "socket.io-parser/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], - - "tar/fs-minipass": ["fs-minipass@2.1.0", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg=="], - - "tar/minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="], - - "tar/minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="], - - "tar/mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="], - - "wrap-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], - - "wrap-ansi/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], - - "wrap-ansi/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { 
"ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], - - "yauzl/buffer-crc32": ["buffer-crc32@0.2.13", "", {}, "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ=="], - - "@angular-devkit/core/ajv/json-schema-traverse": ["json-schema-traverse@1.0.0", "", {}, "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="], - - "@angular/compiler-cli/@babel/core/convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="], - - "@angular/compiler-cli/@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - - "@angular/compiler-cli/yargs/cliui": ["cliui@9.0.1", "", { "dependencies": { "string-width": "^7.2.0", "strip-ansi": "^7.1.0", "wrap-ansi": "^9.0.0" } }, "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w=="], - - "@angular/compiler-cli/yargs/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], - - "@angular/compiler-cli/yargs/yargs-parser": ["yargs-parser@22.0.0", "", {}, "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw=="], - - "@babel/helper-compilation-targets/lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="], - - "@isaacs/cliui/string-width/emoji-regex": ["emoji-regex@9.2.2", "", {}, "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="], - - 
"@isaacs/cliui/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], - - "@isaacs/cliui/wrap-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], - - "@listr2/prompt-adapter-inquirer/@inquirer/type/mute-stream": ["mute-stream@1.0.0", "", {}, "sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA=="], - - "@npmcli/git/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], - - "@npmcli/package-json/glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "@npmcli/promise-spawn/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], - - "@npmcli/run-script/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], - - "@tailwindcss/oxide/tar/chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], - - "@tailwindcss/oxide/tar/mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], - - "@tailwindcss/oxide/tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], - - "@tufjs/models/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], 
- - "@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], - - "ajv-formats/ajv/json-schema-traverse": ["json-schema-traverse@1.0.0", "", {}, "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="], - - "archiver-utils/glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "body-parser/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], - - "cacache/glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], - - "cacache/tar/chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], - - "cacache/tar/mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], - - "cacache/tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], - - "cli-truncate/string-width/emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], - - "cli-truncate/string-width/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], - - "connect/debug/ms": ["ms@2.0.0", "", {}, 
"sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], - - "dockerode/tar-fs/chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="], - - "dockerode/tar-fs/tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="], - - "finalhandler/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], - - "find-cache-dir/make-dir/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - - "ignore-walk/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], - - "istanbul-lib-instrument/@babel/core/convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="], - - "istanbul-lib-instrument/@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - - "karma-coverage/istanbul-lib-instrument/@babel/core": ["@babel/core@7.27.4", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.3", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.4", "@babel/parser": "^7.27.4", "@babel/template": "^7.27.2", "@babel/traverse": "^7.27.4", "@babel/types": "^7.27.3", 
"convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-bXYxrXFubeYdvB0NhD/NBB3Qi6aZeV20GOWVI47t2dkecCEoneR4NPVcb7abpXDEvejgrUfFtG6vG/zxAKmg+g=="], - - "karma-coverage/istanbul-lib-instrument/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], - - "karma/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], - - "karma/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="], - - "karma/yargs/cliui": ["cliui@7.0.4", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", "wrap-ansi": "^7.0.0" } }, "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ=="], - - "karma/yargs/yargs-parser": ["yargs-parser@20.2.9", "", {}, "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w=="], - - "lazystream/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="], - - "lazystream/readable-stream/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="], - - "lazystream/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="], - - "log-update/slice-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], - - 
"log-update/slice-ansi/is-fullwidth-code-point": ["is-fullwidth-code-point@5.0.0", "", { "dependencies": { "get-east-asian-width": "^1.0.0" } }, "sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA=="], - - "log-update/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], - - "mongodb-memory-server-core/mongodb/bson": ["bson@5.5.1", "", {}, "sha512-ix0EwukN2EpC0SRWIj/7B5+A6uQMQy6KMREI9qQqvgpkV2frH63T0UDVd1SYedL6dNCmDBYB3QtXi4ISk9YT+g=="], - - "mongodb-memory-server-core/mongodb/mongodb-connection-string-url": ["mongodb-connection-string-url@2.6.0", "", { "dependencies": { "@types/whatwg-url": "^8.2.1", "whatwg-url": "^11.0.0" } }, "sha512-WvTZlI9ab0QYtTYnuMLgobULWhokRjtC7db9LtcVfJ+Hsnyr5eo6ZtNAt3Ly24XZScGMelOcGtm7lSn0332tPQ=="], - - "node-gyp/tar/chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], - - "node-gyp/tar/mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], - - "node-gyp/tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], - - "node-gyp/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], - - "ora/string-width/emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], - - "ora/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], - - "p-locate/p-limit/yocto-queue": ["yocto-queue@0.1.0", "", {}, 
"sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], - - "pkg-dir/find-up/locate-path": ["locate-path@5.0.0", "", { "dependencies": { "p-locate": "^4.1.0" } }, "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g=="], - - "readdir-glob/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], - - "tar/fs-minipass/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - - "tar/minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], - - "wrap-ansi/string-width/emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], - - "wrap-ansi/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], - - "@angular/compiler-cli/yargs/cliui/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], - - "@angular/compiler-cli/yargs/string-width/emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], - - "@angular/compiler-cli/yargs/string-width/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], - - "@npmcli/package-json/glob/minimatch/brace-expansion": 
["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], - - "archiver-utils/glob/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], - - "cacache/glob/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], - - "cli-truncate/string-width/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], - - "dockerode/tar-fs/tar-stream/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], - - "karma-coverage/istanbul-lib-instrument/@babel/core/convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="], - - "karma/chokidar/readdirp/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], - - "karma/yargs/cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], - - "mongodb-memory-server-core/mongodb/mongodb-connection-string-url/@types/whatwg-url": ["@types/whatwg-url@8.2.2", "", { "dependencies": { "@types/node": "*", "@types/webidl-conversions": "*" } }, 
"sha512-FtQu10RWgn3D9U4aazdwIE2yzphmTJREDqNdODHrbrZmmMqI0vMheC/6NE/J1Yveaj8H+ela+YwWTjq5PGmuhA=="], - - "mongodb-memory-server-core/mongodb/mongodb-connection-string-url/whatwg-url": ["whatwg-url@11.0.0", "", { "dependencies": { "tr46": "^3.0.0", "webidl-conversions": "^7.0.0" } }, "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ=="], - - "pkg-dir/find-up/locate-path/p-locate": ["p-locate@4.1.0", "", { "dependencies": { "p-limit": "^2.2.0" } }, "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A=="], - - "@angular/compiler-cli/yargs/cliui/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], - - "@angular/compiler-cli/yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], - - "mongodb-memory-server-core/mongodb/mongodb-connection-string-url/whatwg-url/tr46": ["tr46@3.0.0", "", { "dependencies": { "punycode": "^2.1.1" } }, "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA=="], - - "pkg-dir/find-up/locate-path/p-locate/p-limit": ["p-limit@2.3.0", "", { "dependencies": { "p-try": "^2.0.0" } }, "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w=="], - } -} +{ + "lockfileVersion": 1, + "workspaces": { + "": { + "name": "stock-bot", + "dependencies": { + "bullmq": "^5.53.2", + }, + "devDependencies": { + "@testcontainers/mongodb": "^10.7.2", + "@testcontainers/postgresql": "^10.7.2", + "@types/bun": "latest", + "@types/node": "^22.15.30", + "@types/supertest": "^6.0.2", + "@types/yup": "^0.32.0", + "bun-types": "^1.2.15", + "mongodb-memory-server": "^9.1.6", + "pg-mem": "^2.8.1", + "supertest": "^6.3.4", + "turbo": "^2.5.4", + "typescript": "^5.8.3", + "yup": "^1.6.1", + }, + }, + 
"apps/dashboard": { + "name": "trading-dashboard", + "version": "0.0.0", + "dependencies": { + "@angular/animations": "^20.0.0", + "@angular/cdk": "^20.0.1", + "@angular/common": "^20.0.0", + "@angular/compiler": "^20.0.0", + "@angular/core": "^20.0.0", + "@angular/forms": "^20.0.0", + "@angular/material": "^20.0.1", + "@angular/platform-browser": "^20.0.0", + "@angular/router": "^20.0.0", + "rxjs": "~7.8.2", + "tslib": "^2.8.1", + "zone.js": "~0.15.1", + }, + "devDependencies": { + "@angular/build": "^20.0.0", + "@angular/cli": "^20.0.0", + "@angular/compiler-cli": "^20.0.0", + "@tailwindcss/postcss": "^4.1.8", + "@types/jasmine": "~5.1.8", + "autoprefixer": "^10.4.21", + "jasmine-core": "~5.7.1", + "karma": "~6.4.4", + "karma-chrome-launcher": "~3.2.0", + "karma-coverage": "~2.2.1", + "karma-jasmine": "~5.1.0", + "karma-jasmine-html-reporter": "~2.1.0", + "postcss": "^8.5.4", + "tailwindcss": "^4.1.8", + "typescript": "~5.8.3", + }, + }, + "apps/data-service": { + "name": "@stock-bot/data-service", + "version": "1.0.0", + "dependencies": { + "@stock-bot/cache": "*", + "@stock-bot/config": "*", + "@stock-bot/event-bus": "*", + "@stock-bot/http": "*", + "@stock-bot/logger": "*", + "@stock-bot/mongodb-client": "*", + "@stock-bot/questdb-client": "*", + "@stock-bot/shutdown": "*", + "@stock-bot/types": "*", + "bullmq": "^5.53.2", + "hono": "^4.0.0", + "p-limit": "^6.2.0", + "ws": "^8.0.0", + }, + "devDependencies": { + "@types/ws": "^8.0.0", + "typescript": "^5.0.0", + }, + }, + "apps/execution-service": { + "name": "@stock-bot/execution-service", + "version": "1.0.0", + "dependencies": { + "@hono/node-server": "^1.12.0", + "@stock-bot/config": "*", + "@stock-bot/event-bus": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "@stock-bot/utils": "*", + "hono": "^4.6.1", + }, + "devDependencies": { + "@types/node": "^22.5.0", + "typescript": "^5.5.4", + }, + }, + "apps/portfolio-service": { + "name": "@stock-bot/portfolio-service", + "version": "1.0.0", + 
"dependencies": { + "@hono/node-server": "^1.12.0", + "@stock-bot/config": "*", + "@stock-bot/data-frame": "*", + "@stock-bot/logger": "*", + "@stock-bot/questdb-client": "*", + "@stock-bot/types": "*", + "@stock-bot/utils": "*", + "hono": "^4.6.1", + }, + "devDependencies": { + "@types/node": "^22.5.0", + "typescript": "^5.5.4", + }, + }, + "apps/processing-service": { + "name": "@stock-bot/processing-service", + "version": "1.0.0", + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/event-bus": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "@stock-bot/utils": "*", + "@stock-bot/vector-engine": "*", + "hono": "^4.0.0", + }, + "devDependencies": { + "typescript": "^5.0.0", + }, + }, + "apps/strategy-service": { + "name": "@stock-bot/strategy-service", + "version": "1.0.0", + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/data-frame": "*", + "@stock-bot/event-bus": "*", + "@stock-bot/logger": "*", + "@stock-bot/questdb-client": "*", + "@stock-bot/strategy-engine": "*", + "@stock-bot/types": "*", + "@stock-bot/utils": "*", + "@stock-bot/vector-engine": "*", + "commander": "^11.0.0", + "hono": "^4.0.0", + }, + "devDependencies": { + "@types/node": "^20.0.0", + "typescript": "^5.0.0", + }, + }, + "libs/cache": { + "name": "@stock-bot/cache", + "version": "1.0.0", + "dependencies": { + "ioredis": "^5.3.2", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "bun-types": "^1.2.15", + "typescript": "^5.3.0", + }, + }, + "libs/config": { + "name": "@stock-bot/config", + "version": "1.0.0", + "dependencies": { + "dotenv": "^16.5.0", + "yup": "^1.6.1", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": "^6.19.0", + "bun-types": "^1.2.15", + "eslint": "^8.56.0", + "typescript": "^5.3.0", + }, + }, + "libs/data-adjustments": { + "name": "@stock-bot/data-adjustments", + "version": "1.0.0", + "dependencies": { + "@stock-bot/logger": "*", + 
"@stock-bot/types": "*", + }, + "devDependencies": { + "bun-types": "^1.1.12", + "typescript": "^5.4.5", + }, + "peerDependencies": { + "typescript": "^5.0.0", + }, + }, + "libs/data-frame": { + "name": "@stock-bot/data-frame", + "version": "1.0.0", + "dependencies": { + "@stock-bot/logger": "*", + "@stock-bot/utils": "*", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "bun-types": "^1.2.15", + "typescript": "^5.3.0", + }, + }, + "libs/event-bus": { + "name": "@stock-bot/event-bus", + "version": "1.0.0", + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "eventemitter3": "^5.0.1", + "ioredis": "^5.3.2", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "bun-types": "^1.2.15", + "typescript": "^5.3.0", + }, + }, + "libs/http": { + "name": "@stock-bot/http", + "version": "1.0.0", + "dependencies": { + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "axios": "^1.9.0", + "http-proxy-agent": "^7.0.2", + "https-proxy-agent": "^7.0.6", + "socks-proxy-agent": "^8.0.5", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": "^6.19.0", + "bun-types": "^1.2.15", + "eslint": "^8.56.0", + "typescript": "^5.3.0", + }, + }, + "libs/logger": { + "name": "@stock-bot/logger", + "version": "1.0.0", + "dependencies": { + "@stock-bot/config": "*", + "got": "^14.4.7", + "pino": "^9.7.0", + "pino-loki": "^2.6.0", + "pino-pretty": "^13.0.0", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "bun-types": "^1.2.15", + "typescript": "^5.3.0", + }, + }, + "libs/mongodb-client": { + "name": "@stock-bot/mongodb-client", + "version": "1.0.0", + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "mongodb": "^6.17.0", + "yup": "^1.6.1", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": "^6.19.0", + "bun-types": 
"^1.2.15", + "eslint": "^8.56.0", + "typescript": "^5.3.0", + }, + }, + "libs/postgres-client": { + "name": "@stock-bot/postgres-client", + "version": "1.0.0", + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "pg": "^8.11.3", + "yup": "^1.6.1", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "@types/pg": "^8.10.7", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": "^6.19.0", + "bun-types": "^1.2.15", + "eslint": "^8.56.0", + "typescript": "^5.3.0", + }, + }, + "libs/questdb-client": { + "name": "@stock-bot/questdb-client", + "version": "1.0.0", + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": "^6.19.0", + "bun-types": "^1.2.15", + "eslint": "^8.56.0", + "typescript": "^5.3.0", + }, + }, + "libs/shutdown": { + "name": "@stock-bot/shutdown", + "version": "1.0.0", + "devDependencies": { + "@types/node": "^20.0.0", + "typescript": "^5.0.0", + }, + }, + "libs/strategy-engine": { + "name": "@stock-bot/strategy-engine", + "version": "1.0.0", + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/data-frame": "*", + "@stock-bot/event-bus": "*", + "@stock-bot/logger": "*", + "@stock-bot/utils": "*", + "commander": "^14.0.0", + "eventemitter3": "^5.0.1", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "bun-types": "^1.2.15", + "typescript": "^5.3.0", + }, + }, + "libs/types": { + "name": "@stock-bot/types", + "version": "1.0.0", + "devDependencies": { + "@types/node": "^20.11.0", + "bun-types": "^1.2.15", + "typescript": "^5.3.0", + }, + }, + "libs/utils": { + "name": "@stock-bot/utils", + "version": "1.0.0", + "dependencies": { + "@stock-bot/types": "*", + "date-fns": "^2.30.0", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "bun-types": "^1.2.15", + 
"typescript": "^5.3.0", + }, + }, + "libs/vector-engine": { + "name": "@stock-bot/vector-engine", + "version": "1.0.0", + "dependencies": { + "@stock-bot/data-frame": "*", + "@stock-bot/logger": "*", + "@stock-bot/utils": "*", + }, + "devDependencies": { + "@types/node": "^20.11.0", + "bun-types": "^1.2.15", + "typescript": "^5.3.0", + }, + }, + }, + "trustedDependencies": [ + "mongodb", + ], + "packages": { + "@alloc/quick-lru": ["@alloc/quick-lru@5.2.0", "", {}, "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw=="], + + "@ampproject/remapping": ["@ampproject/remapping@2.3.0", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw=="], + + "@angular-devkit/architect": ["@angular-devkit/architect@0.2000.1", "", { "dependencies": { "@angular-devkit/core": "20.0.1", "rxjs": "7.8.2" } }, "sha512-EcOGU1xEhARYpDF391VaeUg/+YRym9OxzJMcc0rSHl3YLK8/m+24ap2YAQY5N7n9+mmEqHVu/q31ldFpOoMCTw=="], + + "@angular-devkit/core": ["@angular-devkit/core@20.0.1", "", { "dependencies": { "ajv": "8.17.1", "ajv-formats": "3.0.1", "jsonc-parser": "3.3.1", "picomatch": "4.0.2", "rxjs": "7.8.2", "source-map": "0.7.4" }, "peerDependencies": { "chokidar": "^4.0.0" }, "optionalPeers": ["chokidar"] }, "sha512-Ilafyj8JVwq3NZsaiGw5UDkP4EAkGKiEvZ4TC3WVidZbM4EpKt9/Jd7ZpsTRGDLG429U+fGhay+ZQeCFGqy5rA=="], + + "@angular-devkit/schematics": ["@angular-devkit/schematics@20.0.1", "", { "dependencies": { "@angular-devkit/core": "20.0.1", "jsonc-parser": "3.3.1", "magic-string": "0.30.17", "ora": "8.2.0", "rxjs": "7.8.2" } }, "sha512-bSr/5YIdjtwKYqylkYrlOVP+tuFz+tfOldmLfWHAsDGnJUznb5t4ckx6yyROp+iDQfu2Aez09p+l4KfUBq+H9A=="], + + "@angular/animations": ["@angular/animations@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/common": "20.0.2", "@angular/core": "20.0.2" } }, 
"sha512-p9TqZdVOFWMF75lfxk++5GZOBGO3K7qVdAXiQw89VLac8yqsu9iXFlcq34x256McHxONTjrrKBeP5oU1T8rxCw=="], + + "@angular/build": ["@angular/build@20.0.1", "", { "dependencies": { "@ampproject/remapping": "2.3.0", "@angular-devkit/architect": "0.2000.1", "@babel/core": "7.27.1", "@babel/helper-annotate-as-pure": "7.27.1", "@babel/helper-split-export-declaration": "7.24.7", "@inquirer/confirm": "5.1.10", "@vitejs/plugin-basic-ssl": "2.0.0", "beasties": "0.3.4", "browserslist": "^4.23.0", "esbuild": "0.25.5", "https-proxy-agent": "7.0.6", "istanbul-lib-instrument": "6.0.3", "jsonc-parser": "3.3.1", "listr2": "8.3.3", "magic-string": "0.30.17", "mrmime": "2.0.1", "parse5-html-rewriting-stream": "7.1.0", "picomatch": "4.0.2", "piscina": "5.0.0", "rollup": "4.40.2", "sass": "1.88.0", "semver": "7.7.2", "source-map-support": "0.5.21", "tinyglobby": "0.2.13", "vite": "6.3.5", "watchpack": "2.4.2" }, "optionalDependencies": { "lmdb": "3.3.0" }, "peerDependencies": { "@angular/compiler": "^20.0.0", "@angular/compiler-cli": "^20.0.0", "@angular/core": "^20.0.0", "@angular/localize": "^20.0.0", "@angular/platform-browser": "^20.0.0", "@angular/platform-server": "^20.0.0", "@angular/service-worker": "^20.0.0", "@angular/ssr": "^20.0.1", "karma": "^6.4.0", "less": "^4.2.0", "ng-packagr": "^20.0.0", "postcss": "^8.4.0", "tailwindcss": "^2.0.0 || ^3.0.0 || ^4.0.0", "tslib": "^2.3.0", "typescript": ">=5.8 <5.9", "vitest": "^3.1.1" }, "optionalPeers": ["@angular/core", "@angular/localize", "@angular/platform-browser", "@angular/platform-server", "@angular/service-worker", "@angular/ssr", "karma", "less", "ng-packagr", "postcss", "tailwindcss", "vitest"] }, "sha512-m/0jtXIeOaoU/WXtMLRuvq7UaGRxNHpoRKVVoJrifvZuNBYGM4e2lzxlIlo8kiQhPpZQc0zcAMoosbmzKKdkUQ=="], + + "@angular/cdk": ["@angular/cdk@20.0.2", "", { "dependencies": { "parse5": "^7.1.2", "tslib": "^2.3.0" }, "peerDependencies": { "@angular/common": "^20.0.0 || ^21.0.0", "@angular/core": "^20.0.0 || ^21.0.0", "rxjs": "^6.5.3 || ^7.4.0" } 
}, "sha512-gRQcpTNhnwBxXSmpnrljODUHQmB2Hnxc6L2Ad6mSMV+c3opd9KIFxL5eG2WOOPHGAaPrV4gNFw+t1i01U4grTg=="], + + "@angular/cli": ["@angular/cli@20.0.1", "", { "dependencies": { "@angular-devkit/architect": "0.2000.1", "@angular-devkit/core": "20.0.1", "@angular-devkit/schematics": "20.0.1", "@inquirer/prompts": "7.5.1", "@listr2/prompt-adapter-inquirer": "2.0.22", "@schematics/angular": "20.0.1", "@yarnpkg/lockfile": "1.1.0", "ini": "5.0.0", "jsonc-parser": "3.3.1", "listr2": "8.3.3", "npm-package-arg": "12.0.2", "npm-pick-manifest": "10.0.0", "pacote": "21.0.0", "resolve": "1.22.10", "semver": "7.7.2", "yargs": "17.7.2" }, "bin": { "ng": "bin/ng.js" } }, "sha512-OU91byvG/WsDDUVmXIJr3/sU89U6g8G8IXrqgVRVPgjXKEQMnUNBlmygD2rMUR5C02g2lGc6s2j0hnOJ/dDNOw=="], + + "@angular/common": ["@angular/common@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/core": "20.0.2", "rxjs": "^6.5.3 || ^7.4.0" } }, "sha512-dqzKFL2MgPpQiaY9ZyDhGZYWEXblsqofW6czH/+HkmlNgSmDCBaY/UhNQShxNQ0KQbR1o08OWuQr29zxkY1CMA=="], + + "@angular/compiler": ["@angular/compiler@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" } }, "sha512-BJYXGUZaY9awYvgt0w9TDq73A1+m8W5eMRn/krWeQcfWakwTgs27BSxmhfJhD45KrMrky5yxAvGgqSfMKrLeng=="], + + "@angular/compiler-cli": ["@angular/compiler-cli@20.0.2", "", { "dependencies": { "@babel/core": "7.27.4", "@jridgewell/sourcemap-codec": "^1.4.14", "chokidar": "^4.0.0", "convert-source-map": "^1.5.1", "reflect-metadata": "^0.2.0", "semver": "^7.0.0", "tslib": "^2.3.0", "yargs": "^18.0.0" }, "peerDependencies": { "@angular/compiler": "20.0.2", "typescript": ">=5.8 <5.9" }, "optionalPeers": ["typescript"], "bin": { "ngc": "bundles/src/bin/ngc.js", "ng-xi18n": "bundles/src/bin/ng_xi18n.js" } }, "sha512-kVKHS5ZRadTR+rRuBl3Dsccsv/jiHXdJJYlDQwQW87afd4RtAu75P3RsSd8jaUj+7P9O4Ve4vwCZVtgOh0yxbw=="], + + "@angular/core": ["@angular/core@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/compiler": "20.0.2", "rxjs": "^6.5.3 
|| ^7.4.0", "zone.js": "~0.15.0" }, "optionalPeers": ["@angular/compiler", "zone.js"] }, "sha512-z9L8WPrHTkfupHtpO6aW4KqcqigIhxcQwCaEMgXWc5WJkoiMJSfo/dk+cyiGjCfTkc5Y6DO6f6ERi0IWYWWbPA=="], + + "@angular/forms": ["@angular/forms@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/common": "20.0.2", "@angular/core": "20.0.2", "@angular/platform-browser": "20.0.2", "rxjs": "^6.5.3 || ^7.4.0" } }, "sha512-RrQKwzFZsEDXsvesNXS4XxndEKZHC+VexIdRr1vlxx7isfvpl4htOxceW0D+Gvku1mnaS99eB/AWS50HxW3B3Q=="], + + "@angular/material": ["@angular/material@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/cdk": "20.0.2", "@angular/common": "^20.0.0 || ^21.0.0", "@angular/core": "^20.0.0 || ^21.0.0", "@angular/forms": "^20.0.0 || ^21.0.0", "@angular/platform-browser": "^20.0.0 || ^21.0.0", "rxjs": "^6.5.3 || ^7.4.0" } }, "sha512-yIXvF+LjFdHjJWyvn1SxbWB9LdNxYnqEKbKzminW4WPXlPJMOAeyhEDFeQv9W92Zv+/ibS4tI3/SD759ejb45g=="], + + "@angular/platform-browser": ["@angular/platform-browser@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/animations": "20.0.2", "@angular/common": "20.0.2", "@angular/core": "20.0.2" }, "optionalPeers": ["@angular/animations"] }, "sha512-4adMQSVlwxjY9z/LEk3Q5hr4/qbM9UD9FcqbyZOt3+BL+F2GwGdKzwg6Dj4Dv0Tv8/dudNSVgHc8lIdQ4C7K1w=="], + + "@angular/router": ["@angular/router@20.0.2", "", { "dependencies": { "tslib": "^2.3.0" }, "peerDependencies": { "@angular/common": "20.0.2", "@angular/core": "20.0.2", "@angular/platform-browser": "20.0.2", "rxjs": "^6.5.3 || ^7.4.0" } }, "sha512-UyuTeoXkkZw1eFFNwrTfb1JXow6HKVdLNb3n9MhqDz+3ekdiqDH8EBaKhxYZxlcpNoa6cNbECZJYtaHy1lw38g=="], + + "@babel/code-frame": ["@babel/code-frame@7.27.1", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg=="], + + 
"@babel/compat-data": ["@babel/compat-data@7.27.5", "", {}, "sha512-KiRAp/VoJaWkkte84TvUd9qjdbZAdiqyvMxrGl1N6vzFogKmaLgoM3L1kgtLicp2HP5fBJS8JrZKLVIZGVJAVg=="], + + "@babel/core": ["@babel/core@7.27.1", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.1", "@babel/helper-compilation-targets": "^7.27.1", "@babel/helper-module-transforms": "^7.27.1", "@babel/helpers": "^7.27.1", "@babel/parser": "^7.27.1", "@babel/template": "^7.27.1", "@babel/traverse": "^7.27.1", "@babel/types": "^7.27.1", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-IaaGWsQqfsQWVLqMn9OB92MNN7zukfVA4s7KKAI0KfrrDsZ0yhi5uV4baBuLuN7n3vsZpwP8asPPcVwApxvjBQ=="], + + "@babel/generator": ["@babel/generator@7.27.5", "", { "dependencies": { "@babel/parser": "^7.27.5", "@babel/types": "^7.27.3", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^3.0.2" } }, "sha512-ZGhA37l0e/g2s1Cnzdix0O3aLYm66eF8aufiVteOgnwxgnRP8GoyMj7VWsgWnQbVKXyge7hqrFh2K2TQM6t1Hw=="], + + "@babel/helper-annotate-as-pure": ["@babel/helper-annotate-as-pure@7.27.1", "", { "dependencies": { "@babel/types": "^7.27.1" } }, "sha512-WnuuDILl9oOBbKnb4L+DyODx7iC47XfzmNCpTttFsSp6hTG7XZxu60+4IO+2/hPfcGOoKbFiwoI/+zwARbNQow=="], + + "@babel/helper-compilation-targets": ["@babel/helper-compilation-targets@7.27.2", "", { "dependencies": { "@babel/compat-data": "^7.27.2", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" } }, "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ=="], + + "@babel/helper-module-imports": ["@babel/helper-module-imports@7.27.1", "", { "dependencies": { "@babel/traverse": "^7.27.1", "@babel/types": "^7.27.1" } }, "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w=="], + + 
"@babel/helper-module-transforms": ["@babel/helper-module-transforms@7.27.3", "", { "dependencies": { "@babel/helper-module-imports": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1", "@babel/traverse": "^7.27.3" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg=="], + + "@babel/helper-split-export-declaration": ["@babel/helper-split-export-declaration@7.24.7", "", { "dependencies": { "@babel/types": "^7.24.7" } }, "sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA=="], + + "@babel/helper-string-parser": ["@babel/helper-string-parser@7.27.1", "", {}, "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA=="], + + "@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.27.1", "", {}, "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow=="], + + "@babel/helper-validator-option": ["@babel/helper-validator-option@7.27.1", "", {}, "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg=="], + + "@babel/helpers": ["@babel/helpers@7.27.6", "", { "dependencies": { "@babel/template": "^7.27.2", "@babel/types": "^7.27.6" } }, "sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug=="], + + "@babel/parser": ["@babel/parser@7.27.5", "", { "dependencies": { "@babel/types": "^7.27.3" }, "bin": "./bin/babel-parser.js" }, "sha512-OsQd175SxWkGlzbny8J3K8TnnDD0N3lrIUtB92xwyRpzaenGZhxDvxN/JgU00U3CDZNj9tPuDJ5H0WS4Nt3vKg=="], + + "@babel/runtime": ["@babel/runtime@7.27.6", "", {}, "sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q=="], + + "@babel/template": ["@babel/template@7.27.2", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/parser": "^7.27.2", "@babel/types": "^7.27.1" } }, 
"sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw=="], + + "@babel/traverse": ["@babel/traverse@7.27.4", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.3", "@babel/parser": "^7.27.4", "@babel/template": "^7.27.2", "@babel/types": "^7.27.3", "debug": "^4.3.1", "globals": "^11.1.0" } }, "sha512-oNcu2QbHqts9BtOWJosOVJapWjBDSxGCpFvikNR5TGDYDQf3JwpIoMzIKrvfoti93cLfPJEG4tH9SPVeyCGgdA=="], + + "@babel/types": ["@babel/types@7.27.6", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1" } }, "sha512-ETyHEk2VHHvl9b9jZP5IHPavHYk57EhanlRRuae9XCpb/j5bDCbPPMOBfCWhnl/7EDJz0jEMCi/RhccCE8r1+Q=="], + + "@balena/dockerignore": ["@balena/dockerignore@1.0.2", "", {}, "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q=="], + + "@colors/colors": ["@colors/colors@1.5.0", "", {}, "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ=="], + + "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-9o3TMmpmftaCMepOdA5k/yDw8SfInyzWWTjYTFCX3kPSDJMROQTb8jg+h9Cnwnmm1vOzvxN7gIfB5V2ewpjtGA=="], + + "@esbuild/android-arm": ["@esbuild/android-arm@0.25.5", "", { "os": "android", "cpu": "arm" }, "sha512-AdJKSPeEHgi7/ZhuIPtcQKr5RQdo6OO2IL87JkianiMYMPbCtot9fxPbrMiBADOWWm3T2si9stAiVsGbTQFkbA=="], + + "@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.5", "", { "os": "android", "cpu": "arm64" }, "sha512-VGzGhj4lJO+TVGV1v8ntCZWJktV7SGCs3Pn1GRWI1SBFtRALoomm8k5E9Pmwg3HOAal2VDc2F9+PM/rEY6oIDg=="], + + "@esbuild/android-x64": ["@esbuild/android-x64@0.25.5", "", { "os": "android", "cpu": "x64" }, "sha512-D2GyJT1kjvO//drbRT3Hib9XPwQeWd9vZoBJn+bu/lVsOZ13cqNdDeqIF/xQ5/VmWvMduP6AmXvylO/PIc2isw=="], + + "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.5", "", { "os": "darwin", "cpu": "arm64" }, 
"sha512-GtaBgammVvdF7aPIgH2jxMDdivezgFu6iKpmT+48+F8Hhg5J/sfnDieg0aeG/jfSvkYQU2/pceFPDKlqZzwnfQ=="], + + "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.5", "", { "os": "darwin", "cpu": "x64" }, "sha512-1iT4FVL0dJ76/q1wd7XDsXrSW+oLoquptvh4CLR4kITDtqi2e/xwXwdCVH8hVHU43wgJdsq7Gxuzcs6Iq/7bxQ=="], + + "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.5", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-nk4tGP3JThz4La38Uy/gzyXtpkPW8zSAmoUhK9xKKXdBCzKODMc2adkB2+8om9BDYugz+uGV7sLmpTYzvmz6Sw=="], + + "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.5", "", { "os": "freebsd", "cpu": "x64" }, "sha512-PrikaNjiXdR2laW6OIjlbeuCPrPaAl0IwPIaRv+SMV8CiM8i2LqVUHFC1+8eORgWyY7yhQY+2U2fA55mBzReaw=="], + + "@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.5", "", { "os": "linux", "cpu": "arm" }, "sha512-cPzojwW2okgh7ZlRpcBEtsX7WBuqbLrNXqLU89GxWbNt6uIg78ET82qifUy3W6OVww6ZWobWub5oqZOVtwolfw=="], + + "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.5", "", { "os": "linux", "cpu": "arm64" }, "sha512-Z9kfb1v6ZlGbWj8EJk9T6czVEjjq2ntSYLY2cw6pAZl4oKtfgQuS4HOq41M/BcoLPzrUbNd+R4BXFyH//nHxVg=="], + + "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.5", "", { "os": "linux", "cpu": "ia32" }, "sha512-sQ7l00M8bSv36GLV95BVAdhJ2QsIbCuCjh/uYrWiMQSUuV+LpXwIqhgJDcvMTj+VsQmqAHL2yYaasENvJ7CDKA=="], + + "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.5", "", { "os": "linux", "cpu": "none" }, "sha512-0ur7ae16hDUC4OL5iEnDb0tZHDxYmuQyhKhsPBV8f99f6Z9KQM02g33f93rNH5A30agMS46u2HP6qTdEt6Q1kg=="], + + "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.5", "", { "os": "linux", "cpu": "none" }, "sha512-kB/66P1OsHO5zLz0i6X0RxlQ+3cu0mkxS3TKFvkb5lin6uwZ/ttOkP3Z8lfR9mJOBk14ZwZ9182SIIWFGNmqmg=="], + + "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.5", "", { "os": "linux", "cpu": "ppc64" }, "sha512-UZCmJ7r9X2fe2D6jBmkLBMQetXPXIsZjQJCjgwpVDz+YMcS6oFR27alkgGv3Oqkv07bxdvw7fyB71/olceJhkQ=="], + + "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.5", "", { "os": "linux", "cpu": "none" }, 
"sha512-kTxwu4mLyeOlsVIFPfQo+fQJAV9mh24xL+y+Bm6ej067sYANjyEw1dNHmvoqxJUCMnkBdKpvOn0Ahql6+4VyeA=="], + + "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.5", "", { "os": "linux", "cpu": "s390x" }, "sha512-K2dSKTKfmdh78uJ3NcWFiqyRrimfdinS5ErLSn3vluHNeHVnBAFWC8a4X5N+7FgVE1EjXS1QDZbpqZBjfrqMTQ=="], + + "@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.5", "", { "os": "linux", "cpu": "x64" }, "sha512-uhj8N2obKTE6pSZ+aMUbqq+1nXxNjZIIjCjGLfsWvVpy7gKCOL6rsY1MhRh9zLtUtAI7vpgLMK6DxjO8Qm9lJw=="], + + "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.5", "", { "os": "none", "cpu": "arm64" }, "sha512-pwHtMP9viAy1oHPvgxtOv+OkduK5ugofNTVDilIzBLpoWAM16r7b/mxBvfpuQDpRQFMfuVr5aLcn4yveGvBZvw=="], + + "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.5", "", { "os": "none", "cpu": "x64" }, "sha512-WOb5fKrvVTRMfWFNCroYWWklbnXH0Q5rZppjq0vQIdlsQKuw6mdSihwSo4RV/YdQ5UCKKvBy7/0ZZYLBZKIbwQ=="], + + "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.5", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-7A208+uQKgTxHd0G0uqZO8UjK2R0DDb4fDmERtARjSHWxqMTye4Erz4zZafx7Di9Cv+lNHYuncAkiGFySoD+Mw=="], + + "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.5", "", { "os": "openbsd", "cpu": "x64" }, "sha512-G4hE405ErTWraiZ8UiSoesH8DaCsMm0Cay4fsFWOOUcz8b8rC6uCvnagr+gnioEjWn0wC+o1/TAHt+It+MpIMg=="], + + "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.5", "", { "os": "sunos", "cpu": "x64" }, "sha512-l+azKShMy7FxzY0Rj4RCt5VD/q8mG/e+mDivgspo+yL8zW7qEwctQ6YqKX34DTEleFAvCIUviCFX1SDZRSyMQA=="], + + "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.5", "", { "os": "win32", "cpu": "arm64" }, "sha512-O2S7SNZzdcFG7eFKgvwUEZ2VG9D/sn/eIiz8XRZ1Q/DO5a3s76Xv0mdBzVM5j5R639lXQmPmSo0iRpHqUUrsxw=="], + + "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.5", "", { "os": "win32", "cpu": "ia32" }, "sha512-onOJ02pqs9h1iMJ1PQphR+VZv8qBMQ77Klcsqv9CNW2w6yLqoURLcgERAIurY6QE63bbLuqgP9ATqajFLK5AMQ=="], + + "@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.5", "", { "os": "win32", "cpu": "x64" }, 
"sha512-TXv6YnJ8ZMVdX+SXWVBo/0p8LTcrUYngpWjvm91TMjjBQii7Oz11Lw5lbDV5Y0TzuhSJHwiH4hEtC1I42mMS0g=="], + + "@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.7.0", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw=="], + + "@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.1", "", {}, "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ=="], + + "@eslint/eslintrc": ["@eslint/eslintrc@2.1.4", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^9.6.0", "globals": "^13.19.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ=="], + + "@eslint/js": ["@eslint/js@8.57.1", "", {}, "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q=="], + + "@fastify/busboy": ["@fastify/busboy@2.1.1", "", {}, "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA=="], + + "@grpc/grpc-js": ["@grpc/grpc-js@1.13.4", "", { "dependencies": { "@grpc/proto-loader": "^0.7.13", "@js-sdsl/ordered-map": "^4.4.2" } }, "sha512-GsFaMXCkMqkKIvwCQjCrwH+GHbPKBjhwo/8ZuUkWHqbI73Kky9I+pQltrlT0+MWpedCoosda53lgjYfyEPgxBg=="], + + "@grpc/proto-loader": ["@grpc/proto-loader@0.7.15", "", { "dependencies": { "lodash.camelcase": "^4.3.0", "long": "^5.0.0", "protobufjs": "^7.2.5", "yargs": "^17.7.2" }, "bin": { "proto-loader-gen-types": "build/bin/proto-loader-gen-types.js" } }, "sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ=="], + + "@hono/node-server": ["@hono/node-server@1.14.4", "", { "peerDependencies": { "hono": "^4" } }, 
"sha512-DnxpshhYewr2q9ZN8ez/M5mmc3sucr8CT1sIgIy1bkeUXut9XWDkqHoFHRhWIQgkYnKpVRxunyhK7WzpJeJ6qQ=="], + + "@humanwhocodes/config-array": ["@humanwhocodes/config-array@0.13.0", "", { "dependencies": { "@humanwhocodes/object-schema": "^2.0.3", "debug": "^4.3.1", "minimatch": "^3.0.5" } }, "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw=="], + + "@humanwhocodes/module-importer": ["@humanwhocodes/module-importer@1.0.1", "", {}, "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="], + + "@humanwhocodes/object-schema": ["@humanwhocodes/object-schema@2.0.3", "", {}, "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA=="], + + "@inquirer/checkbox": ["@inquirer/checkbox@4.1.8", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-d/QAsnwuHX2OPolxvYcgSj7A9DO9H6gVOy2DvBTx+P2LH2iRTo/RSGV3iwCzW024nP9hw98KIuDmdyhZQj1UQg=="], + + "@inquirer/confirm": ["@inquirer/confirm@5.1.10", "", { "dependencies": { "@inquirer/core": "^10.1.11", "@inquirer/type": "^3.0.6" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-FxbQ9giWxUWKUk2O5XZ6PduVnH2CZ/fmMKMBkH71MHJvWr7WL5AHKevhzF1L5uYWB2P548o1RzVxrNd3dpmk6g=="], + + "@inquirer/core": ["@inquirer/core@10.1.13", "", { "dependencies": { "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2", "cli-width": "^4.1.0", "mute-stream": "^2.0.0", "signal-exit": "^4.1.0", "wrap-ansi": "^6.2.0", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-1viSxebkYN2nJULlzCxES6G9/stgHSepZ9LqqfdIGPHj5OHhiBUXVS0a6R0bEC2A+VL4D9w6QB66ebCr6HGllA=="], + + "@inquirer/editor": 
["@inquirer/editor@4.2.13", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7", "external-editor": "^3.1.0" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-WbicD9SUQt/K8O5Vyk9iC2ojq5RHoCLK6itpp2fHsWe44VxxcA9z3GTWlvjSTGmMQpZr+lbVmrxdHcumJoLbMA=="], + + "@inquirer/expand": ["@inquirer/expand@4.0.15", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-4Y+pbr/U9Qcvf+N/goHzPEXiHH8680lM3Dr3Y9h9FFw4gHS+zVpbj8LfbKWIb/jayIB4aSO4pWiBTrBYWkvi5A=="], + + "@inquirer/figures": ["@inquirer/figures@1.0.12", "", {}, "sha512-MJttijd8rMFcKJC8NYmprWr6hD3r9Gd9qUC0XwPNwoEPWSMVJwA2MlXxF+nhZZNMY+HXsWa+o7KY2emWYIn0jQ=="], + + "@inquirer/input": ["@inquirer/input@4.1.12", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-xJ6PFZpDjC+tC1P8ImGprgcsrzQRsUh9aH3IZixm1lAZFK49UGHxM3ltFfuInN2kPYNfyoPRh+tU4ftsjPLKqQ=="], + + "@inquirer/number": ["@inquirer/number@3.0.15", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-xWg+iYfqdhRiM55MvqiTCleHzszpoigUpN5+t1OMcRkJrUrw7va3AzXaxvS+Ak7Gny0j2mFSTv2JJj8sMtbV2g=="], + + "@inquirer/password": ["@inquirer/password@4.0.15", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-75CT2p43DGEnfGTaqFpbDC2p2EEMrq0S+IRrf9iJvYreMy5mAWj087+mdKyLHapUEPLjN10mNvABpGbk8Wdraw=="], + + "@inquirer/prompts": ["@inquirer/prompts@7.5.1", "", { "dependencies": { "@inquirer/checkbox": "^4.1.6", "@inquirer/confirm": "^5.1.10", "@inquirer/editor": "^4.2.11", 
"@inquirer/expand": "^4.0.13", "@inquirer/input": "^4.1.10", "@inquirer/number": "^3.0.13", "@inquirer/password": "^4.0.13", "@inquirer/rawlist": "^4.1.1", "@inquirer/search": "^3.0.13", "@inquirer/select": "^4.2.1" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-5AOrZPf2/GxZ+SDRZ5WFplCA2TAQgK3OYrXCYmJL5NaTu4ECcoWFlfUZuw7Es++6Njv7iu/8vpYJhuzxUH76Vg=="], + + "@inquirer/rawlist": ["@inquirer/rawlist@4.1.3", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/type": "^3.0.7", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-7XrV//6kwYumNDSsvJIPeAqa8+p7GJh7H5kRuxirct2cgOcSWwwNGoXDRgpNFbY/MG2vQ4ccIWCi8+IXXyFMZA=="], + + "@inquirer/search": ["@inquirer/search@3.0.15", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-YBMwPxYBrADqyvP4nNItpwkBnGGglAvCLVW8u4pRmmvOsHUtCAUIMbUrLX5B3tFL1/WsLGdQ2HNzkqswMs5Uaw=="], + + "@inquirer/select": ["@inquirer/select@4.2.3", "", { "dependencies": { "@inquirer/core": "^10.1.13", "@inquirer/figures": "^1.0.12", "@inquirer/type": "^3.0.7", "ansi-escapes": "^4.3.2", "yoctocolors-cjs": "^2.1.2" }, "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-OAGhXU0Cvh0PhLz9xTF/kx6g6x+sP+PcyTiLvCrewI99P3BBeexD+VbuwkNDvqGkk3y2h5ZiWLeRP7BFlhkUDg=="], + + "@inquirer/type": ["@inquirer/type@3.0.7", "", { "peerDependencies": { "@types/node": ">=18" }, "optionalPeers": ["@types/node"] }, "sha512-PfunHQcjwnju84L+ycmcMKB/pTPIngjUJvfnRhKY6FKPuYXlM4aQCb/nIdTFR6BEhMjFvngzvng/vBAJMZpLSA=="], + + "@ioredis/commands": ["@ioredis/commands@1.2.0", "", {}, "sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg=="], + + "@isaacs/cliui": ["@isaacs/cliui@8.0.2", "", { "dependencies": 
{ "string-width": "^5.1.2", "string-width-cjs": "npm:string-width@^4.2.0", "strip-ansi": "^7.0.1", "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", "wrap-ansi": "^8.1.0", "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" } }, "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="], + + "@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="], + + "@istanbuljs/schema": ["@istanbuljs/schema@0.1.3", "", {}, "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA=="], + + "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.8", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="], + + "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="], + + "@jridgewell/set-array": ["@jridgewell/set-array@1.2.1", "", {}, "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="], + + "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="], + + "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="], + + "@js-sdsl/ordered-map": ["@js-sdsl/ordered-map@4.4.2", "", {}, "sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw=="], + + "@listr2/prompt-adapter-inquirer": 
["@listr2/prompt-adapter-inquirer@2.0.22", "", { "dependencies": { "@inquirer/type": "^1.5.5" }, "peerDependencies": { "@inquirer/prompts": ">= 3 < 8" } }, "sha512-hV36ZoY+xKL6pYOt1nPNnkciFkn89KZwqLhAFzJvYysAvL5uBQdiADZx/8bIDXIukzzwG0QlPYolgMzQUtKgpQ=="], + + "@lmdb/lmdb-darwin-arm64": ["@lmdb/lmdb-darwin-arm64@3.3.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-LipbQobyEfQtu8WixasaFUZZ+JCGlho4OWwWIQ5ol0rB1RKkcZvypu7sS1CBvofBGVAa3vbOh8IOGQMrbmL5dg=="], + + "@lmdb/lmdb-darwin-x64": ["@lmdb/lmdb-darwin-x64@3.3.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-yA+9P+ZeA3vg76BLXWeUomIAjxfmSmR2eg8fueHXDg5Xe1Xmkl9JCKuHXUhtJ+mMVcH12d5k4kJBLbyXTadfGQ=="], + + "@lmdb/lmdb-linux-arm": ["@lmdb/lmdb-linux-arm@3.3.0", "", { "os": "linux", "cpu": "arm" }, "sha512-EDYrW9kle+8wI19JCj/PhRnGoCN9bked5cdOPdo1wdgH/HzjgoLPFTn9DHlZccgTEVhp3O+bpWXdN/rWySVvjw=="], + + "@lmdb/lmdb-linux-arm64": ["@lmdb/lmdb-linux-arm64@3.3.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-OeWvSgjXXZ/zmtLqqL78I3910F6UYpUubmsUU+iBHo6nTtjkpXms95rJtGrjkWQqwswKBD7xSMplbYC4LEsiPA=="], + + "@lmdb/lmdb-linux-x64": ["@lmdb/lmdb-linux-x64@3.3.0", "", { "os": "linux", "cpu": "x64" }, "sha512-wDd02mt5ScX4+xd6g78zKBr6ojpgCJCTrllCAabjgap5FzuETqOqaQfKhO+tJuGWv/J5q+GIds6uY7rNFueOxg=="], + + "@lmdb/lmdb-win32-arm64": ["@lmdb/lmdb-win32-arm64@3.3.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-COotWhHJgzXULLiEjOgWQwqig6PoA+6ji6W+sDl6M1HhMXWIymEVHGs0edsVSNtsNSCAWMxJgR3asv6FNX/2EA=="], + + "@lmdb/lmdb-win32-x64": ["@lmdb/lmdb-win32-x64@3.3.0", "", { "os": "win32", "cpu": "x64" }, "sha512-kqUgQH+l8HDbkAapx+aoko7Ez4X4DqkIraOqY/k0QY5EN/iialVlFpBUXh4wFXzirdmEVjbIUMrceUh0Kh8LeA=="], + + "@mongodb-js/saslprep": ["@mongodb-js/saslprep@1.2.2", "", { "dependencies": { "sparse-bitfield": "^3.0.3" } }, "sha512-EB0O3SCSNRUFk66iRCpI+cXzIjdswfCs7F6nOC3RAGJ7xr5YhaicvsRwJ9eyzYvYRlCSDUO/c7g4yNulxKC1WA=="], + + "@msgpackr-extract/msgpackr-extract-darwin-arm64": ["@msgpackr-extract/msgpackr-extract-darwin-arm64@3.0.3", "", { "os": 
"darwin", "cpu": "arm64" }, "sha512-QZHtlVgbAdy2zAqNA9Gu1UpIuI8Xvsd1v8ic6B2pZmeFnFcMWiPLfWXh7TVw4eGEZ/C9TH281KwhVoeQUKbyjw=="], + + "@msgpackr-extract/msgpackr-extract-darwin-x64": ["@msgpackr-extract/msgpackr-extract-darwin-x64@3.0.3", "", { "os": "darwin", "cpu": "x64" }, "sha512-mdzd3AVzYKuUmiWOQ8GNhl64/IoFGol569zNRdkLReh6LRLHOXxU4U8eq0JwaD8iFHdVGqSy4IjFL4reoWCDFw=="], + + "@msgpackr-extract/msgpackr-extract-linux-arm": ["@msgpackr-extract/msgpackr-extract-linux-arm@3.0.3", "", { "os": "linux", "cpu": "arm" }, "sha512-fg0uy/dG/nZEXfYilKoRe7yALaNmHoYeIoJuJ7KJ+YyU2bvY8vPv27f7UKhGRpY6euFYqEVhxCFZgAUNQBM3nw=="], + + "@msgpackr-extract/msgpackr-extract-linux-arm64": ["@msgpackr-extract/msgpackr-extract-linux-arm64@3.0.3", "", { "os": "linux", "cpu": "arm64" }, "sha512-YxQL+ax0XqBJDZiKimS2XQaf+2wDGVa1enVRGzEvLLVFeqa5kx2bWbtcSXgsxjQB7nRqqIGFIcLteF/sHeVtQg=="], + + "@msgpackr-extract/msgpackr-extract-linux-x64": ["@msgpackr-extract/msgpackr-extract-linux-x64@3.0.3", "", { "os": "linux", "cpu": "x64" }, "sha512-cvwNfbP07pKUfq1uH+S6KJ7dT9K8WOE4ZiAcsrSes+UY55E/0jLYc+vq+DO7jlmqRb5zAggExKm0H7O/CBaesg=="], + + "@msgpackr-extract/msgpackr-extract-win32-x64": ["@msgpackr-extract/msgpackr-extract-win32-x64@3.0.3", "", { "os": "win32", "cpu": "x64" }, "sha512-x0fWaQtYp4E6sktbsdAqnehxDgEc/VwM7uLsRCYWaiGu0ykYdZPiS8zCWdnjHwyiumousxfBm4SO31eXqwEZhQ=="], + + "@napi-rs/nice": ["@napi-rs/nice@1.0.1", "", { "optionalDependencies": { "@napi-rs/nice-android-arm-eabi": "1.0.1", "@napi-rs/nice-android-arm64": "1.0.1", "@napi-rs/nice-darwin-arm64": "1.0.1", "@napi-rs/nice-darwin-x64": "1.0.1", "@napi-rs/nice-freebsd-x64": "1.0.1", "@napi-rs/nice-linux-arm-gnueabihf": "1.0.1", "@napi-rs/nice-linux-arm64-gnu": "1.0.1", "@napi-rs/nice-linux-arm64-musl": "1.0.1", "@napi-rs/nice-linux-ppc64-gnu": "1.0.1", "@napi-rs/nice-linux-riscv64-gnu": "1.0.1", "@napi-rs/nice-linux-s390x-gnu": "1.0.1", "@napi-rs/nice-linux-x64-gnu": "1.0.1", "@napi-rs/nice-linux-x64-musl": "1.0.1", 
"@napi-rs/nice-win32-arm64-msvc": "1.0.1", "@napi-rs/nice-win32-ia32-msvc": "1.0.1", "@napi-rs/nice-win32-x64-msvc": "1.0.1" } }, "sha512-zM0mVWSXE0a0h9aKACLwKmD6nHcRiKrPpCfvaKqG1CqDEyjEawId0ocXxVzPMCAm6kkWr2P025msfxXEnt8UGQ=="], + + "@napi-rs/nice-android-arm-eabi": ["@napi-rs/nice-android-arm-eabi@1.0.1", "", { "os": "android", "cpu": "arm" }, "sha512-5qpvOu5IGwDo7MEKVqqyAxF90I6aLj4n07OzpARdgDRfz8UbBztTByBp0RC59r3J1Ij8uzYi6jI7r5Lws7nn6w=="], + + "@napi-rs/nice-android-arm64": ["@napi-rs/nice-android-arm64@1.0.1", "", { "os": "android", "cpu": "arm64" }, "sha512-GqvXL0P8fZ+mQqG1g0o4AO9hJjQaeYG84FRfZaYjyJtZZZcMjXW5TwkL8Y8UApheJgyE13TQ4YNUssQaTgTyvA=="], + + "@napi-rs/nice-darwin-arm64": ["@napi-rs/nice-darwin-arm64@1.0.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-91k3HEqUl2fsrz/sKkuEkscj6EAj3/eZNCLqzD2AA0TtVbkQi8nqxZCZDMkfklULmxLkMxuUdKe7RvG/T6s2AA=="], + + "@napi-rs/nice-darwin-x64": ["@napi-rs/nice-darwin-x64@1.0.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-jXnMleYSIR/+TAN/p5u+NkCA7yidgswx5ftqzXdD5wgy/hNR92oerTXHc0jrlBisbd7DpzoaGY4cFD7Sm5GlgQ=="], + + "@napi-rs/nice-freebsd-x64": ["@napi-rs/nice-freebsd-x64@1.0.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-j+iJ/ezONXRQsVIB/FJfwjeQXX7A2tf3gEXs4WUGFrJjpe/z2KB7sOv6zpkm08PofF36C9S7wTNuzHZ/Iiccfw=="], + + "@napi-rs/nice-linux-arm-gnueabihf": ["@napi-rs/nice-linux-arm-gnueabihf@1.0.1", "", { "os": "linux", "cpu": "arm" }, "sha512-G8RgJ8FYXYkkSGQwywAUh84m946UTn6l03/vmEXBYNJxQJcD+I3B3k5jmjFG/OPiU8DfvxutOP8bi+F89MCV7Q=="], + + "@napi-rs/nice-linux-arm64-gnu": ["@napi-rs/nice-linux-arm64-gnu@1.0.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-IMDak59/W5JSab1oZvmNbrms3mHqcreaCeClUjwlwDr0m3BoR09ZiN8cKFBzuSlXgRdZ4PNqCYNeGQv7YMTjuA=="], + + "@napi-rs/nice-linux-arm64-musl": ["@napi-rs/nice-linux-arm64-musl@1.0.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-wG8fa2VKuWM4CfjOjjRX9YLIbysSVV1S3Kgm2Fnc67ap/soHBeYZa6AGMeR5BJAylYRjnoVOzV19Cmkco3QEPw=="], + + "@napi-rs/nice-linux-ppc64-gnu": 
["@napi-rs/nice-linux-ppc64-gnu@1.0.1", "", { "os": "linux", "cpu": "ppc64" }, "sha512-lxQ9WrBf0IlNTCA9oS2jg/iAjQyTI6JHzABV664LLrLA/SIdD+I1i3Mjf7TsnoUbgopBcCuDztVLfJ0q9ubf6Q=="], + + "@napi-rs/nice-linux-riscv64-gnu": ["@napi-rs/nice-linux-riscv64-gnu@1.0.1", "", { "os": "linux", "cpu": "none" }, "sha512-3xs69dO8WSWBb13KBVex+yvxmUeEsdWexxibqskzoKaWx9AIqkMbWmE2npkazJoopPKX2ULKd8Fm9veEn0g4Ig=="], + + "@napi-rs/nice-linux-s390x-gnu": ["@napi-rs/nice-linux-s390x-gnu@1.0.1", "", { "os": "linux", "cpu": "s390x" }, "sha512-lMFI3i9rlW7hgToyAzTaEybQYGbQHDrpRkg+1gJWEpH0PLAQoZ8jiY0IzakLfNWnVda1eTYYlxxFYzW8Rqczkg=="], + + "@napi-rs/nice-linux-x64-gnu": ["@napi-rs/nice-linux-x64-gnu@1.0.1", "", { "os": "linux", "cpu": "x64" }, "sha512-XQAJs7DRN2GpLN6Fb+ZdGFeYZDdGl2Fn3TmFlqEL5JorgWKrQGRUrpGKbgZ25UeZPILuTKJ+OowG2avN8mThBA=="], + + "@napi-rs/nice-linux-x64-musl": ["@napi-rs/nice-linux-x64-musl@1.0.1", "", { "os": "linux", "cpu": "x64" }, "sha512-/rodHpRSgiI9o1faq9SZOp/o2QkKQg7T+DK0R5AkbnI/YxvAIEHf2cngjYzLMQSQgUhxym+LFr+UGZx4vK4QdQ=="], + + "@napi-rs/nice-win32-arm64-msvc": ["@napi-rs/nice-win32-arm64-msvc@1.0.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-rEcz9vZymaCB3OqEXoHnp9YViLct8ugF+6uO5McifTedjq4QMQs3DHz35xBEGhH3gJWEsXMUbzazkz5KNM5YUg=="], + + "@napi-rs/nice-win32-ia32-msvc": ["@napi-rs/nice-win32-ia32-msvc@1.0.1", "", { "os": "win32", "cpu": "ia32" }, "sha512-t7eBAyPUrWL8su3gDxw9xxxqNwZzAqKo0Szv3IjVQd1GpXXVkb6vBBQUuxfIYaXMzZLwlxRQ7uzM2vdUE9ULGw=="], + + "@napi-rs/nice-win32-x64-msvc": ["@napi-rs/nice-win32-x64-msvc@1.0.1", "", { "os": "win32", "cpu": "x64" }, "sha512-JlF+uDcatt3St2ntBG8H02F1mM45i5SF9W+bIKiReVE6wiy3o16oBP/yxt+RZ+N6LbCImJXJ6bXNO2kn9AXicg=="], + + "@noble/hashes": ["@noble/hashes@1.8.0", "", {}, "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A=="], + + "@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, 
"sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="], + + "@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="], + + "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="], + + "@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", "lru-cache": "^10.0.1", "socks-proxy-agent": "^8.0.3" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="], + + "@npmcli/fs": ["@npmcli/fs@4.0.0", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q=="], + + "@npmcli/git": ["@npmcli/git@6.0.3", "", { "dependencies": { "@npmcli/promise-spawn": "^8.0.0", "ini": "^5.0.0", "lru-cache": "^10.0.1", "npm-pick-manifest": "^10.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "semver": "^7.3.5", "which": "^5.0.0" } }, "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ=="], + + "@npmcli/installed-package-contents": ["@npmcli/installed-package-contents@3.0.0", "", { "dependencies": { "npm-bundled": "^4.0.0", "npm-normalize-package-bin": "^4.0.0" }, "bin": { "installed-package-contents": "bin/index.js" } }, "sha512-fkxoPuFGvxyrH+OQzyTkX2LUEamrF4jZSmxjAtPPHHGO0dqsQ8tTKjnIS8SAnPHdk2I03BDtSMR5K/4loKg79Q=="], + + "@npmcli/node-gyp": ["@npmcli/node-gyp@4.0.0", "", {}, "sha512-+t5DZ6mO/QFh78PByMq1fGSAub/agLJZDRfJRMeOSNCt8s9YVlTjmGpIPwPhvXTGUIJk+WszlT0rQa1W33yzNA=="], + + "@npmcli/package-json": ["@npmcli/package-json@6.2.0", "", { "dependencies": { "@npmcli/git": "^6.0.0", "glob": "^10.2.2", "hosted-git-info": 
"^8.0.0", "json-parse-even-better-errors": "^4.0.0", "proc-log": "^5.0.0", "semver": "^7.5.3", "validate-npm-package-license": "^3.0.4" } }, "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA=="], + + "@npmcli/promise-spawn": ["@npmcli/promise-spawn@8.0.2", "", { "dependencies": { "which": "^5.0.0" } }, "sha512-/bNJhjc+o6qL+Dwz/bqfTQClkEO5nTQ1ZEcdCkAQjhkZMHIh22LPG7fNh1enJP1NKWDqYiiABnjFCY7E0zHYtQ=="], + + "@npmcli/redact": ["@npmcli/redact@3.2.2", "", {}, "sha512-7VmYAmk4csGv08QzrDKScdzn11jHPFGyqJW39FyPgPuAp3zIaUmuCo1yxw9aGs+NEJuTGQ9Gwqpt93vtJubucg=="], + + "@npmcli/run-script": ["@npmcli/run-script@9.1.0", "", { "dependencies": { "@npmcli/node-gyp": "^4.0.0", "@npmcli/package-json": "^6.0.0", "@npmcli/promise-spawn": "^8.0.0", "node-gyp": "^11.0.0", "proc-log": "^5.0.0", "which": "^5.0.0" } }, "sha512-aoNSbxtkePXUlbZB+anS1LqsJdctG5n3UVhfU47+CDdwMi6uNTBMF9gPcQRnqghQd2FGzcwwIFBruFMxjhBewg=="], + + "@paralleldrive/cuid2": ["@paralleldrive/cuid2@2.2.2", "", { "dependencies": { "@noble/hashes": "^1.1.5" } }, "sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA=="], + + "@parcel/watcher": ["@parcel/watcher@2.5.1", "", { "dependencies": { "detect-libc": "^1.0.3", "is-glob": "^4.0.3", "micromatch": "^4.0.5", "node-addon-api": "^7.0.0" }, "optionalDependencies": { "@parcel/watcher-android-arm64": "2.5.1", "@parcel/watcher-darwin-arm64": "2.5.1", "@parcel/watcher-darwin-x64": "2.5.1", "@parcel/watcher-freebsd-x64": "2.5.1", "@parcel/watcher-linux-arm-glibc": "2.5.1", "@parcel/watcher-linux-arm-musl": "2.5.1", "@parcel/watcher-linux-arm64-glibc": "2.5.1", "@parcel/watcher-linux-arm64-musl": "2.5.1", "@parcel/watcher-linux-x64-glibc": "2.5.1", "@parcel/watcher-linux-x64-musl": "2.5.1", "@parcel/watcher-win32-arm64": "2.5.1", "@parcel/watcher-win32-ia32": "2.5.1", "@parcel/watcher-win32-x64": "2.5.1" } }, 
"sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg=="], + + "@parcel/watcher-android-arm64": ["@parcel/watcher-android-arm64@2.5.1", "", { "os": "android", "cpu": "arm64" }, "sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA=="], + + "@parcel/watcher-darwin-arm64": ["@parcel/watcher-darwin-arm64@2.5.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw=="], + + "@parcel/watcher-darwin-x64": ["@parcel/watcher-darwin-x64@2.5.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg=="], + + "@parcel/watcher-freebsd-x64": ["@parcel/watcher-freebsd-x64@2.5.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ=="], + + "@parcel/watcher-linux-arm-glibc": ["@parcel/watcher-linux-arm-glibc@2.5.1", "", { "os": "linux", "cpu": "arm" }, "sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA=="], + + "@parcel/watcher-linux-arm-musl": ["@parcel/watcher-linux-arm-musl@2.5.1", "", { "os": "linux", "cpu": "arm" }, "sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q=="], + + "@parcel/watcher-linux-arm64-glibc": ["@parcel/watcher-linux-arm64-glibc@2.5.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w=="], + + "@parcel/watcher-linux-arm64-musl": ["@parcel/watcher-linux-arm64-musl@2.5.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg=="], + + "@parcel/watcher-linux-x64-glibc": ["@parcel/watcher-linux-x64-glibc@2.5.1", "", { "os": "linux", "cpu": "x64" }, 
"sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A=="], + + "@parcel/watcher-linux-x64-musl": ["@parcel/watcher-linux-x64-musl@2.5.1", "", { "os": "linux", "cpu": "x64" }, "sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg=="], + + "@parcel/watcher-win32-arm64": ["@parcel/watcher-win32-arm64@2.5.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw=="], + + "@parcel/watcher-win32-ia32": ["@parcel/watcher-win32-ia32@2.5.1", "", { "os": "win32", "cpu": "ia32" }, "sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ=="], + + "@parcel/watcher-win32-x64": ["@parcel/watcher-win32-x64@2.5.1", "", { "os": "win32", "cpu": "x64" }, "sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA=="], + + "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="], + + "@protobufjs/aspromise": ["@protobufjs/aspromise@1.1.2", "", {}, "sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ=="], + + "@protobufjs/base64": ["@protobufjs/base64@1.1.2", "", {}, "sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg=="], + + "@protobufjs/codegen": ["@protobufjs/codegen@2.0.4", "", {}, "sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg=="], + + "@protobufjs/eventemitter": ["@protobufjs/eventemitter@1.1.0", "", {}, "sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q=="], + + "@protobufjs/fetch": ["@protobufjs/fetch@1.1.0", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.1", "@protobufjs/inquire": "^1.1.0" } }, 
"sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ=="], + + "@protobufjs/float": ["@protobufjs/float@1.0.2", "", {}, "sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ=="], + + "@protobufjs/inquire": ["@protobufjs/inquire@1.1.0", "", {}, "sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q=="], + + "@protobufjs/path": ["@protobufjs/path@1.1.2", "", {}, "sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA=="], + + "@protobufjs/pool": ["@protobufjs/pool@1.1.0", "", {}, "sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw=="], + + "@protobufjs/utf8": ["@protobufjs/utf8@1.1.0", "", {}, "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw=="], + + "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.40.2", "", { "os": "android", "cpu": "arm" }, "sha512-JkdNEq+DFxZfUwxvB58tHMHBHVgX23ew41g1OQinthJ+ryhdRk67O31S7sYw8u2lTjHUPFxwar07BBt1KHp/hg=="], + + "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.40.2", "", { "os": "android", "cpu": "arm64" }, "sha512-13unNoZ8NzUmnndhPTkWPWbX3vtHodYmy+I9kuLxN+F+l+x3LdVF7UCu8TWVMt1POHLh6oDHhnOA04n8oJZhBw=="], + + "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.40.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-Gzf1Hn2Aoe8VZzevHostPX23U7N5+4D36WJNHK88NZHCJr7aVMG4fadqkIf72eqVPGjGc0HJHNuUaUcxiR+N/w=="], + + "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.40.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-47N4hxa01a4x6XnJoskMKTS8XZ0CZMd8YTbINbi+w03A2w4j1RTlnGHOz/P0+Bg1LaVL6ufZyNprSg+fW5nYQQ=="], + + "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.40.2", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-8t6aL4MD+rXSHHZUR1z19+9OFJ2rl1wGKvckN47XFRVO+QL/dUSpKA2SLRo4vMg7ELA8pzGpC+W9OEd1Z/ZqoQ=="], + + 
"@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.40.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-C+AyHBzfpsOEYRFjztcYUFsH4S7UsE9cDtHCtma5BK8+ydOZYgMmWg1d/4KBytQspJCld8ZIujFMAdKG1xyr4Q=="], + + "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.40.2", "", { "os": "linux", "cpu": "arm" }, "sha512-de6TFZYIvJwRNjmW3+gaXiZ2DaWL5D5yGmSYzkdzjBDS3W+B9JQ48oZEsmMvemqjtAFzE16DIBLqd6IQQRuG9Q=="], + + "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.40.2", "", { "os": "linux", "cpu": "arm" }, "sha512-urjaEZubdIkacKc930hUDOfQPysezKla/O9qV+O89enqsqUmQm8Xj8O/vh0gHg4LYfv7Y7UsE3QjzLQzDYN1qg=="], + + "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.40.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-KlE8IC0HFOC33taNt1zR8qNlBYHj31qGT1UqWqtvR/+NuCVhfufAq9fxO8BMFC22Wu0rxOwGVWxtCMvZVLmhQg=="], + + "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.40.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-j8CgxvfM0kbnhu4XgjnCWJQyyBOeBI1Zq91Z850aUddUmPeQvuAy6OiMdPS46gNFgy8gN1xkYyLgwLYZG3rBOg=="], + + "@rollup/rollup-linux-loongarch64-gnu": ["@rollup/rollup-linux-loongarch64-gnu@4.40.2", "", { "os": "linux", "cpu": "none" }, "sha512-Ybc/1qUampKuRF4tQXc7G7QY9YRyeVSykfK36Y5Qc5dmrIxwFhrOzqaVTNoZygqZ1ZieSWTibfFhQ5qK8jpWxw=="], + + "@rollup/rollup-linux-powerpc64le-gnu": ["@rollup/rollup-linux-powerpc64le-gnu@4.40.2", "", { "os": "linux", "cpu": "ppc64" }, "sha512-3FCIrnrt03CCsZqSYAOW/k9n625pjpuMzVfeI+ZBUSDT3MVIFDSPfSUgIl9FqUftxcUXInvFah79hE1c9abD+Q=="], + + "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.40.2", "", { "os": "linux", "cpu": "none" }, "sha512-QNU7BFHEvHMp2ESSY3SozIkBPaPBDTsfVNGx3Xhv+TdvWXFGOSH2NJvhD1zKAT6AyuuErJgbdvaJhYVhVqrWTg=="], + + "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.40.2", "", { "os": "linux", "cpu": "none" }, 
"sha512-5W6vNYkhgfh7URiXTO1E9a0cy4fSgfE4+Hl5agb/U1sa0kjOLMLC1wObxwKxecE17j0URxuTrYZZME4/VH57Hg=="], + + "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.40.2", "", { "os": "linux", "cpu": "s390x" }, "sha512-B7LKIz+0+p348JoAL4X/YxGx9zOx3sR+o6Hj15Y3aaApNfAshK8+mWZEf759DXfRLeL2vg5LYJBB7DdcleYCoQ=="], + + "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.40.2", "", { "os": "linux", "cpu": "x64" }, "sha512-lG7Xa+BmBNwpjmVUbmyKxdQJ3Q6whHjMjzQplOs5Z+Gj7mxPtWakGHqzMqNER68G67kmCX9qX57aRsW5V0VOng=="], + + "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.40.2", "", { "os": "linux", "cpu": "x64" }, "sha512-tD46wKHd+KJvsmije4bUskNuvWKFcTOIM9tZ/RrmIvcXnbi0YK/cKS9FzFtAm7Oxi2EhV5N2OpfFB348vSQRXA=="], + + "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.40.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-Bjv/HG8RRWLNkXwQQemdsWw4Mg+IJ29LK+bJPW2SCzPKOUaMmPEppQlu/Fqk1d7+DX3V7JbFdbkh/NMmurT6Pg=="], + + "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.40.2", "", { "os": "win32", "cpu": "ia32" }, "sha512-dt1llVSGEsGKvzeIO76HToiYPNPYPkmjhMHhP00T9S4rDern8P2ZWvWAQUEJ+R1UdMWJ/42i/QqJ2WV765GZcA=="], + + "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.40.2", "", { "os": "win32", "cpu": "x64" }, "sha512-bwspbWB04XJpeElvsp+DCylKfF4trJDa2Y9Go8O6A7YLX2LIKGcNK/CYImJN6ZP4DcuOHB4Utl3iCbnR62DudA=="], + + "@schematics/angular": ["@schematics/angular@20.0.1", "", { "dependencies": { "@angular-devkit/core": "20.0.1", "@angular-devkit/schematics": "20.0.1", "jsonc-parser": "3.3.1" } }, "sha512-29T9vUAjZnbXM+vImIQcdqG/ibdcfj5+pybo5cbiMSwVPVyerXgnD0HKC4dyZ34V2RFZa8cmyCLe/5bYoPQ+0g=="], + + "@sec-ant/readable-stream": ["@sec-ant/readable-stream@0.4.1", "", {}, "sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg=="], + + "@sigstore/bundle": ["@sigstore/bundle@3.1.0", "", { "dependencies": { "@sigstore/protobuf-specs": "^0.4.0" } }, 
"sha512-Mm1E3/CmDDCz3nDhFKTuYdB47EdRFRQMOE/EAbiG1MJW77/w1b3P7Qx7JSrVJs8PfwOLOVcKQCHErIwCTyPbag=="], + + "@sigstore/core": ["@sigstore/core@2.0.0", "", {}, "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg=="], + + "@sigstore/protobuf-specs": ["@sigstore/protobuf-specs@0.4.3", "", {}, "sha512-fk2zjD9117RL9BjqEwF7fwv7Q/P9yGsMV4MUJZ/DocaQJ6+3pKr+syBq1owU5Q5qGw5CUbXzm+4yJ2JVRDQeSA=="], + + "@sigstore/sign": ["@sigstore/sign@3.1.0", "", { "dependencies": { "@sigstore/bundle": "^3.1.0", "@sigstore/core": "^2.0.0", "@sigstore/protobuf-specs": "^0.4.0", "make-fetch-happen": "^14.0.2", "proc-log": "^5.0.0", "promise-retry": "^2.0.1" } }, "sha512-knzjmaOHOov1Ur7N/z4B1oPqZ0QX5geUfhrVaqVlu+hl0EAoL4o+l0MSULINcD5GCWe3Z0+YJO8ues6vFlW0Yw=="], + + "@sigstore/tuf": ["@sigstore/tuf@3.1.1", "", { "dependencies": { "@sigstore/protobuf-specs": "^0.4.1", "tuf-js": "^3.0.1" } }, "sha512-eFFvlcBIoGwVkkwmTi/vEQFSva3xs5Ot3WmBcjgjVdiaoelBLQaQ/ZBfhlG0MnG0cmTYScPpk7eDdGDWUcFUmg=="], + + "@sigstore/verify": ["@sigstore/verify@2.1.1", "", { "dependencies": { "@sigstore/bundle": "^3.1.0", "@sigstore/core": "^2.0.0", "@sigstore/protobuf-specs": "^0.4.1" } }, "sha512-hVJD77oT67aowHxwT4+M6PGOp+E2LtLdTK3+FC0lBO9T7sYwItDMXZ7Z07IDCvR1M717a4axbIWckrW67KMP/w=="], + + "@sindresorhus/is": ["@sindresorhus/is@7.0.2", "", {}, "sha512-d9xRovfKNz1SKieM0qJdO+PQonjnnIfSNWfHYnBSJ9hkjm0ZPw6HlxscDXYstp3z+7V2GOFHc+J0CYrYTjqCJw=="], + + "@socket.io/component-emitter": ["@socket.io/component-emitter@3.1.2", "", {}, "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA=="], + + "@stock-bot/cache": ["@stock-bot/cache@workspace:libs/cache"], + + "@stock-bot/config": ["@stock-bot/config@workspace:libs/config"], + + "@stock-bot/data-adjustments": ["@stock-bot/data-adjustments@workspace:libs/data-adjustments"], + + "@stock-bot/data-frame": ["@stock-bot/data-frame@workspace:libs/data-frame"], + + "@stock-bot/data-service": 
["@stock-bot/data-service@workspace:apps/data-service"], + + "@stock-bot/event-bus": ["@stock-bot/event-bus@workspace:libs/event-bus"], + + "@stock-bot/execution-service": ["@stock-bot/execution-service@workspace:apps/execution-service"], + + "@stock-bot/http": ["@stock-bot/http@workspace:libs/http"], + + "@stock-bot/logger": ["@stock-bot/logger@workspace:libs/logger"], + + "@stock-bot/mongodb-client": ["@stock-bot/mongodb-client@workspace:libs/mongodb-client"], + + "@stock-bot/portfolio-service": ["@stock-bot/portfolio-service@workspace:apps/portfolio-service"], + + "@stock-bot/postgres-client": ["@stock-bot/postgres-client@workspace:libs/postgres-client"], + + "@stock-bot/processing-service": ["@stock-bot/processing-service@workspace:apps/processing-service"], + + "@stock-bot/questdb-client": ["@stock-bot/questdb-client@workspace:libs/questdb-client"], + + "@stock-bot/shutdown": ["@stock-bot/shutdown@workspace:libs/shutdown"], + + "@stock-bot/strategy-engine": ["@stock-bot/strategy-engine@workspace:libs/strategy-engine"], + + "@stock-bot/strategy-service": ["@stock-bot/strategy-service@workspace:apps/strategy-service"], + + "@stock-bot/types": ["@stock-bot/types@workspace:libs/types"], + + "@stock-bot/utils": ["@stock-bot/utils@workspace:libs/utils"], + + "@stock-bot/vector-engine": ["@stock-bot/vector-engine@workspace:libs/vector-engine"], + + "@szmarczak/http-timer": ["@szmarczak/http-timer@5.0.1", "", { "dependencies": { "defer-to-connect": "^2.0.1" } }, "sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw=="], + + "@tailwindcss/node": ["@tailwindcss/node@4.1.8", "", { "dependencies": { "@ampproject/remapping": "^2.3.0", "enhanced-resolve": "^5.18.1", "jiti": "^2.4.2", "lightningcss": "1.30.1", "magic-string": "^0.30.17", "source-map-js": "^1.2.1", "tailwindcss": "4.1.8" } }, "sha512-OWwBsbC9BFAJelmnNcrKuf+bka2ZxCE2A4Ft53Tkg4uoiE67r/PMEYwCsourC26E+kmxfwE0hVzMdxqeW+xu7Q=="], + + "@tailwindcss/oxide": 
["@tailwindcss/oxide@4.1.8", "", { "dependencies": { "detect-libc": "^2.0.4", "tar": "^7.4.3" }, "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.8", "@tailwindcss/oxide-darwin-arm64": "4.1.8", "@tailwindcss/oxide-darwin-x64": "4.1.8", "@tailwindcss/oxide-freebsd-x64": "4.1.8", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.8", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.8", "@tailwindcss/oxide-linux-arm64-musl": "4.1.8", "@tailwindcss/oxide-linux-x64-gnu": "4.1.8", "@tailwindcss/oxide-linux-x64-musl": "4.1.8", "@tailwindcss/oxide-wasm32-wasi": "4.1.8", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.8", "@tailwindcss/oxide-win32-x64-msvc": "4.1.8" } }, "sha512-d7qvv9PsM5N3VNKhwVUhpK6r4h9wtLkJ6lz9ZY9aeZgrUWk1Z8VPyqyDT9MZlem7GTGseRQHkeB1j3tC7W1P+A=="], + + "@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.8", "", { "os": "android", "cpu": "arm64" }, "sha512-Fbz7qni62uKYceWYvUjRqhGfZKwhZDQhlrJKGtnZfuNtHFqa8wmr+Wn74CTWERiW2hn3mN5gTpOoxWKk0jRxjg=="], + + "@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.8", "", { "os": "darwin", "cpu": "arm64" }, "sha512-RdRvedGsT0vwVVDztvyXhKpsU2ark/BjgG0huo4+2BluxdXo8NDgzl77qh0T1nUxmM11eXwR8jA39ibvSTbi7A=="], + + "@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.1.8", "", { "os": "darwin", "cpu": "x64" }, "sha512-t6PgxjEMLp5Ovf7uMb2OFmb3kqzVTPPakWpBIFzppk4JE4ix0yEtbtSjPbU8+PZETpaYMtXvss2Sdkx8Vs4XRw=="], + + "@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.8", "", { "os": "freebsd", "cpu": "x64" }, "sha512-g8C8eGEyhHTqwPStSwZNSrOlyx0bhK/V/+zX0Y+n7DoRUzyS8eMbVshVOLJTDDC+Qn9IJnilYbIKzpB9n4aBsg=="], + + "@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.8", "", { "os": "linux", "cpu": "arm" }, "sha512-Jmzr3FA4S2tHhaC6yCjac3rGf7hG9R6Gf2z9i9JFcuyy0u79HfQsh/thifbYTF2ic82KJovKKkIB6Z9TdNhCXQ=="], + + "@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.8", "", { "os": "linux", "cpu": 
"arm64" }, "sha512-qq7jXtO1+UEtCmCeBBIRDrPFIVI4ilEQ97qgBGdwXAARrUqSn/L9fUrkb1XP/mvVtoVeR2bt/0L77xx53bPZ/Q=="], + + "@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.8", "", { "os": "linux", "cpu": "arm64" }, "sha512-O6b8QesPbJCRshsNApsOIpzKt3ztG35gfX9tEf4arD7mwNinsoCKxkj8TgEE0YRjmjtO3r9FlJnT/ENd9EVefQ=="], + + "@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.8", "", { "os": "linux", "cpu": "x64" }, "sha512-32iEXX/pXwikshNOGnERAFwFSfiltmijMIAbUhnNyjFr3tmWmMJWQKU2vNcFX0DACSXJ3ZWcSkzNbaKTdngH6g=="], + + "@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.8", "", { "os": "linux", "cpu": "x64" }, "sha512-s+VSSD+TfZeMEsCaFaHTaY5YNj3Dri8rST09gMvYQKwPphacRG7wbuQ5ZJMIJXN/puxPcg/nU+ucvWguPpvBDg=="], + + "@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.8", "", { "dependencies": { "@emnapi/core": "^1.4.3", "@emnapi/runtime": "^1.4.3", "@emnapi/wasi-threads": "^1.0.2", "@napi-rs/wasm-runtime": "^0.2.10", "@tybys/wasm-util": "^0.9.0", "tslib": "^2.8.0" }, "cpu": "none" }, "sha512-CXBPVFkpDjM67sS1psWohZ6g/2/cd+cq56vPxK4JeawelxwK4YECgl9Y9TjkE2qfF+9/s1tHHJqrC4SS6cVvSg=="], + + "@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.8", "", { "os": "win32", "cpu": "arm64" }, "sha512-7GmYk1n28teDHUjPlIx4Z6Z4hHEgvP5ZW2QS9ygnDAdI/myh3HTHjDqtSqgu1BpRoI4OiLx+fThAyA1JePoENA=="], + + "@tailwindcss/oxide-win32-x64-msvc": ["@tailwindcss/oxide-win32-x64-msvc@4.1.8", "", { "os": "win32", "cpu": "x64" }, "sha512-fou+U20j+Jl0EHwK92spoWISON2OBnCazIc038Xj2TdweYV33ZRkS9nwqiUi2d/Wba5xg5UoHfvynnb/UB49cQ=="], + + "@tailwindcss/postcss": ["@tailwindcss/postcss@4.1.8", "", { "dependencies": { "@alloc/quick-lru": "^5.2.0", "@tailwindcss/node": "4.1.8", "@tailwindcss/oxide": "4.1.8", "postcss": "^8.4.41", "tailwindcss": "4.1.8" } }, "sha512-vB/vlf7rIky+w94aWMw34bWW1ka6g6C3xIOdICKX2GC0VcLtL6fhlLiafF0DVIwa9V6EHz8kbWMkS2s2QvvNlw=="], + + "@testcontainers/mongodb": 
["@testcontainers/mongodb@10.28.0", "", { "dependencies": { "testcontainers": "^10.28.0" } }, "sha512-78h6n2jnFOQ8IfPjgL1+vsHuEeA0itclEOpx9kkQR+FOWnwJN9AeeX6+rMmZCtRgTsr5wT0BvfFoDssMkDqWaQ=="], + + "@testcontainers/postgresql": ["@testcontainers/postgresql@10.28.0", "", { "dependencies": { "testcontainers": "^10.28.0" } }, "sha512-NN25rruG5D4Q7pCNIJuHwB+G85OSeJ3xHZ2fWx0O6sPoPEfCYwvpj8mq99cyn68nxFkFYZeyrZJtSFO+FnydiA=="], + + "@tufjs/canonical-json": ["@tufjs/canonical-json@2.0.0", "", {}, "sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA=="], + + "@tufjs/models": ["@tufjs/models@3.0.1", "", { "dependencies": { "@tufjs/canonical-json": "2.0.0", "minimatch": "^9.0.5" } }, "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA=="], + + "@types/bun": ["@types/bun@1.2.15", "", { "dependencies": { "bun-types": "1.2.15" } }, "sha512-U1ljPdBEphF0nw1MIk0hI7kPg7dFdPyM7EenHsp6W5loNHl7zqy6JQf/RKCgnUn2KDzUpkBwHPnEJEjII594bA=="], + + "@types/cookiejar": ["@types/cookiejar@2.1.5", "", {}, "sha512-he+DHOWReW0nghN24E1WUqM0efK4kI9oTqDm6XmK8ZPe2djZ90BSNdGnIyCLzCPw7/pogPlGbzI2wHGGmi4O/Q=="], + + "@types/cors": ["@types/cors@2.8.19", "", { "dependencies": { "@types/node": "*" } }, "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg=="], + + "@types/docker-modem": ["@types/docker-modem@3.0.6", "", { "dependencies": { "@types/node": "*", "@types/ssh2": "*" } }, "sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg=="], + + "@types/dockerode": ["@types/dockerode@3.3.40", "", { "dependencies": { "@types/docker-modem": "*", "@types/node": "*", "@types/ssh2": "*" } }, "sha512-O1ckSFYbcYv/KcnAHMLCnKQYY8/5+6CRzpsOPcQIePHRX2jG4Gmz8uXPMCXIxTGN9OYkE5eox/L67l2sGY1UYg=="], + + "@types/estree": ["@types/estree@1.0.7", "", {}, "sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ=="], + + 
"@types/http-cache-semantics": ["@types/http-cache-semantics@4.0.4", "", {}, "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA=="], + + "@types/jasmine": ["@types/jasmine@5.1.8", "", {}, "sha512-u7/CnvRdh6AaaIzYjCgUuVbREFgulhX05Qtf6ZtW+aOcjCKKVvKgpkPYJBFTZSHtFBYimzU4zP0V2vrEsq9Wcg=="], + + "@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="], + + "@types/methods": ["@types/methods@1.1.4", "", {}, "sha512-ymXWVrDiCxTBE3+RIrrP533E70eA+9qu7zdWoHuOmGujkYtzf4HQF96b8nwHLqhuf4ykX61IGRIB38CC6/sImQ=="], + + "@types/node": ["@types/node@22.15.30", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-6Q7lr06bEHdlfplU6YRbgG1SFBdlsfNC4/lX+SkhiTs0cpJkOElmWls8PxDFv4yY/xKb8Y6SO0OmSX4wgqTZbA=="], + + "@types/pg": ["@types/pg@8.15.4", "", { "dependencies": { "@types/node": "*", "pg-protocol": "*", "pg-types": "^2.2.0" } }, "sha512-I6UNVBAoYbvuWkkU3oosC8yxqH21f4/Jc4DK71JLG3dT2mdlGe1z+ep/LQGXaKaOgcvUrsQoPRqfgtMcvZiJhg=="], + + "@types/semver": ["@types/semver@7.7.0", "", {}, "sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA=="], + + "@types/ssh2": ["@types/ssh2@0.5.52", "", { "dependencies": { "@types/node": "*", "@types/ssh2-streams": "*" } }, "sha512-lbLLlXxdCZOSJMCInKH2+9V/77ET2J6NPQHpFI0kda61Dd1KglJs+fPQBchizmzYSOJBgdTajhPqBO1xxLywvg=="], + + "@types/ssh2-streams": ["@types/ssh2-streams@0.1.12", "", { "dependencies": { "@types/node": "*" } }, "sha512-Sy8tpEmCce4Tq0oSOYdfqaBpA3hDM8SoxoFh5vzFsu2oL+znzGz8oVWW7xb4K920yYMUY+PIG31qZnFMfPWNCg=="], + + "@types/superagent": ["@types/superagent@8.1.9", "", { "dependencies": { "@types/cookiejar": "^2.1.5", "@types/methods": "^1.1.4", "@types/node": "*", "form-data": "^4.0.0" } }, "sha512-pTVjI73witn+9ILmoJdajHGW2jkSaOzhiFYF1Rd3EQ94kymLqB9PjD9ISg7WaALC7+dCHT0FGe9T2LktLq/3GQ=="], + + "@types/supertest": ["@types/supertest@6.0.3", "", { 
"dependencies": { "@types/methods": "^1.1.4", "@types/superagent": "^8.1.0" } }, "sha512-8WzXq62EXFhJ7QsH3Ocb/iKQ/Ty9ZVWnVzoTKc9tyyFRRF3a74Tk2+TLFgaFFw364Ere+npzHKEJ6ga2LzIL7w=="], + + "@types/webidl-conversions": ["@types/webidl-conversions@7.0.3", "", {}, "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA=="], + + "@types/whatwg-url": ["@types/whatwg-url@11.0.5", "", { "dependencies": { "@types/webidl-conversions": "*" } }, "sha512-coYR071JRaHa+xoEvvYqvnIHaVqaYrLPbsufM9BF63HkwI5Lgmy2QR8Q5K/lYDYo5AK82wOvSOS0UsLTpTG7uQ=="], + + "@types/ws": ["@types/ws@8.18.1", "", { "dependencies": { "@types/node": "*" } }, "sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg=="], + + "@types/yup": ["@types/yup@0.32.0", "", { "dependencies": { "yup": "*" } }, "sha512-Gr2lllWTDxGVYHgWfL8szjdedERpNgm44L9BDL2cmcHG7Bfd6taEpiW3ayMFLaYvlJr/6bFXDJdh6L406AGlFg=="], + + "@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@6.21.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.5.1", "@typescript-eslint/scope-manager": "6.21.0", "@typescript-eslint/type-utils": "6.21.0", "@typescript-eslint/utils": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", "natural-compare": "^1.4.0", "semver": "^7.5.4", "ts-api-utils": "^1.0.1" }, "peerDependencies": { "@typescript-eslint/parser": "^6.0.0 || ^6.0.0-alpha", "eslint": "^7.0.0 || ^8.0.0" } }, "sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA=="], + + "@typescript-eslint/parser": ["@typescript-eslint/parser@6.21.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "6.21.0", "@typescript-eslint/types": "6.21.0", "@typescript-eslint/typescript-estree": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^7.0.0 || ^8.0.0" } }, 
"sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ=="], + + "@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@6.21.0", "", { "dependencies": { "@typescript-eslint/types": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0" } }, "sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg=="], + + "@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@6.21.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "6.21.0", "@typescript-eslint/utils": "6.21.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, "peerDependencies": { "eslint": "^7.0.0 || ^8.0.0" } }, "sha512-rZQI7wHfao8qMX3Rd3xqeYSMCL3SoiSQLBATSiVKARdFGCYSRvmViieZjqc58jKgs8Y8i9YvVVhRbHSTA4VBag=="], + + "@typescript-eslint/types": ["@typescript-eslint/types@6.21.0", "", {}, "sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg=="], + + "@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@6.21.0", "", { "dependencies": { "@typescript-eslint/types": "6.21.0", "@typescript-eslint/visitor-keys": "6.21.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", "minimatch": "9.0.3", "semver": "^7.5.4", "ts-api-utils": "^1.0.1" } }, "sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ=="], + + "@typescript-eslint/utils": ["@typescript-eslint/utils@6.21.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", "@typescript-eslint/scope-manager": "6.21.0", "@typescript-eslint/types": "6.21.0", "@typescript-eslint/typescript-estree": "6.21.0", "semver": "^7.5.4" }, "peerDependencies": { "eslint": "^7.0.0 || ^8.0.0" } }, "sha512-NfWVaC8HP9T8cbKQxHcsJBY5YE1O33+jpMwN45qzWWaPDZgLIbo12toGMWnmhvCpd3sIxkpDw3Wv1B3dYrbDQQ=="], + + "@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@6.21.0", "", { 
"dependencies": { "@typescript-eslint/types": "6.21.0", "eslint-visitor-keys": "^3.4.1" } }, "sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A=="], + + "@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="], + + "@vitejs/plugin-basic-ssl": ["@vitejs/plugin-basic-ssl@2.0.0", "", { "peerDependencies": { "vite": "^6.0.0" } }, "sha512-gc9Tjg8bUxBVSTzeWT3Njc0Cl3PakHFKdNfABnZWiUgbxqmHDEn7uECv3fHVylxoYgNzAcmU7ZrILz+BwSo3sA=="], + + "@yarnpkg/lockfile": ["@yarnpkg/lockfile@1.1.0", "", {}, "sha512-GpSwvyXOcOOlV70vbnzjj4fW5xW/FdUF6nQEt1ENy7m4ZCczi1+/buVUPAqmGfqznsORNFzUMjctTIp8a9tuCQ=="], + + "abbrev": ["abbrev@3.0.1", "", {}, "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg=="], + + "abort-controller": ["abort-controller@3.0.0", "", { "dependencies": { "event-target-shim": "^5.0.0" } }, "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg=="], + + "accepts": ["accepts@1.3.8", "", { "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw=="], + + "acorn": ["acorn@8.15.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="], + + "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="], + + "agent-base": ["agent-base@7.1.3", "", {}, "sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw=="], + + "ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" 
} }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="], + + "ajv-formats": ["ajv-formats@3.0.1", "", { "dependencies": { "ajv": "^8.0.0" } }, "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ=="], + + "ansi-escapes": ["ansi-escapes@4.3.2", "", { "dependencies": { "type-fest": "^0.21.3" } }, "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ=="], + + "ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="], + + "ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="], + + "anymatch": ["anymatch@3.1.3", "", { "dependencies": { "normalize-path": "^3.0.0", "picomatch": "^2.0.4" } }, "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="], + + "archiver": ["archiver@7.0.1", "", { "dependencies": { "archiver-utils": "^5.0.2", "async": "^3.2.4", "buffer-crc32": "^1.0.0", "readable-stream": "^4.0.0", "readdir-glob": "^1.1.2", "tar-stream": "^3.0.0", "zip-stream": "^6.0.1" } }, "sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ=="], + + "archiver-utils": ["archiver-utils@5.0.2", "", { "dependencies": { "glob": "^10.0.0", "graceful-fs": "^4.2.0", "is-stream": "^2.0.1", "lazystream": "^1.0.0", "lodash": "^4.17.15", "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" } }, "sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA=="], + + "argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="], + + "array-union": ["array-union@2.1.0", "", {}, 
"sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw=="], + + "asap": ["asap@2.0.6", "", {}, "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA=="], + + "asn1": ["asn1@0.2.6", "", { "dependencies": { "safer-buffer": "~2.1.0" } }, "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ=="], + + "async": ["async@3.2.6", "", {}, "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA=="], + + "async-lock": ["async-lock@1.4.1", "", {}, "sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ=="], + + "async-mutex": ["async-mutex@0.4.1", "", { "dependencies": { "tslib": "^2.4.0" } }, "sha512-WfoBo4E/TbCX1G95XTjbWTE3X2XLG0m1Xbv2cwOtuPdyH9CZvnaA5nCt1ucjaKEgW2A5IF71hxrRhr83Je5xjA=="], + + "asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="], + + "atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="], + + "autoprefixer": ["autoprefixer@10.4.21", "", { "dependencies": { "browserslist": "^4.24.4", "caniuse-lite": "^1.0.30001702", "fraction.js": "^4.3.7", "normalize-range": "^0.1.2", "picocolors": "^1.1.1", "postcss-value-parser": "^4.2.0" }, "peerDependencies": { "postcss": "^8.1.0" }, "bin": { "autoprefixer": "bin/autoprefixer" } }, "sha512-O+A6LWV5LDHSJD3LjHYoNi4VLsj/Whi7k6zG12xTYaU4cQ8oxQGckXNX8cRHK5yOZ/ppVHe0ZBXGzSV9jXdVbQ=="], + + "axios": ["axios@1.9.0", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, "sha512-re4CqKTJaURpzbLHtIi6XpDv20/CnpXOtjRY5/CU32L8gU8ek9UIivcfvSWvmKEngmVbrUtPpdDwWDWL7DNHvg=="], + + "b4a": ["b4a@1.6.7", "", {}, "sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg=="], + + 
"balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="], + + "bare-events": ["bare-events@2.5.4", "", {}, "sha512-+gFfDkR8pj4/TrWCGUGWmJIkBwuxPS5F+a5yWjOHQt2hHvNZd5YLzadjmDUtFmMM4y429bnKLa8bYBMHcYdnQA=="], + + "bare-fs": ["bare-fs@4.1.5", "", { "dependencies": { "bare-events": "^2.5.4", "bare-path": "^3.0.0", "bare-stream": "^2.6.4" }, "peerDependencies": { "bare-buffer": "*" }, "optionalPeers": ["bare-buffer"] }, "sha512-1zccWBMypln0jEE05LzZt+V/8y8AQsQQqxtklqaIyg5nu6OAYFhZxPXinJTSG+kU5qyNmeLgcn9AW7eHiCHVLA=="], + + "bare-os": ["bare-os@3.6.1", "", {}, "sha512-uaIjxokhFidJP+bmmvKSgiMzj2sV5GPHaZVAIktcxcpCyBFFWO+YlikVAdhmUo2vYFvFhOXIAlldqV29L8126g=="], + + "bare-path": ["bare-path@3.0.0", "", { "dependencies": { "bare-os": "^3.0.1" } }, "sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw=="], + + "bare-stream": ["bare-stream@2.6.5", "", { "dependencies": { "streamx": "^2.21.0" }, "peerDependencies": { "bare-buffer": "*", "bare-events": "*" }, "optionalPeers": ["bare-buffer", "bare-events"] }, "sha512-jSmxKJNJmHySi6hC42zlZnq00rga4jjxcgNZjY9N5WlOe/iOoGRtdwGsHzQv2RlH2KOYMwGUXhf2zXd32BA9RA=="], + + "base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="], + + "base64id": ["base64id@2.0.0", "", {}, "sha512-lGe34o6EHj9y3Kts9R4ZYs/Gr+6N7MCaMlIFA3F1R2O5/m7K06AxfSeO5530PEERE6/WyEg3lsuyw4GHlPZHog=="], + + "bcrypt-pbkdf": ["bcrypt-pbkdf@1.0.2", "", { "dependencies": { "tweetnacl": "^0.14.3" } }, "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w=="], + + "beasties": ["beasties@0.3.4", "", { "dependencies": { "css-select": "^5.1.0", "css-what": "^6.1.0", "dom-serializer": "^2.0.0", "domhandler": "^5.0.3", "htmlparser2": "^10.0.0", "picocolors": "^1.1.1", "postcss": "^8.4.49", "postcss-media-query-parser": "^0.2.3" } }, 
"sha512-NmzN1zN1cvGccXFyZ73335+ASXwBlVWcUPssiUDIlFdfyatHPRRufjCd5w8oPaQPvVnf9ELklaCGb1gi9FBwIw=="], + + "binary-extensions": ["binary-extensions@2.3.0", "", {}, "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="], + + "bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="], + + "body-parser": ["body-parser@1.20.3", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" } }, "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g=="], + + "boolbase": ["boolbase@1.0.0", "", {}, "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww=="], + + "brace-expansion": ["brace-expansion@1.1.11", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="], + + "braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="], + + "browserslist": ["browserslist@4.25.0", "", { "dependencies": { "caniuse-lite": "^1.0.30001718", "electron-to-chromium": "^1.5.160", "node-releases": "^2.0.19", "update-browserslist-db": "^1.1.3" }, "bin": { "browserslist": "cli.js" } }, "sha512-PJ8gYKeS5e/whHBh8xrwYK+dAvEj7JXtz6uTucnMRB8OiGTsKccFekoRrjajPBHV8oOY+2tI4uxeceSimKwMFA=="], + + "bson": ["bson@6.10.4", "", {}, "sha512-WIsKqkSC0ABoBJuT1LEX+2HEvNmNKKgnTAyd0fL8qzK4SH2i9NXg+t08YtdZp/V9IZ33cxe3iV4yM0qg8lMQng=="], + + "buffer": ["buffer@6.0.3", "", { "dependencies": { 
"base64-js": "^1.3.1", "ieee754": "^1.2.1" } }, "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA=="], + + "buffer-crc32": ["buffer-crc32@1.0.0", "", {}, "sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w=="], + + "buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="], + + "buildcheck": ["buildcheck@0.0.6", "", {}, "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A=="], + + "bullmq": ["bullmq@5.53.2", "", { "dependencies": { "cron-parser": "^4.9.0", "ioredis": "^5.4.1", "msgpackr": "^1.11.2", "node-abort-controller": "^3.1.1", "semver": "^7.5.4", "tslib": "^2.0.0", "uuid": "^9.0.0" } }, "sha512-xHgxrP/yNJHD7VCw1h+eRBh+2TCPBCM39uC9gCyksYc6ufcJP+HTZ/A2lzB2x7qMFWrvsX7tM40AT2BmdkYL/Q=="], + + "bun-types": ["bun-types@1.2.15", "", { "dependencies": { "@types/node": "*" } }, "sha512-NarRIaS+iOaQU1JPfyKhZm4AsUOrwUOqRNHY0XxI8GI8jYxiLXLcdjYMG9UKS+fwWasc1uw1htV9AX24dD+p4w=="], + + "byline": ["byline@5.0.0", "", {}, "sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q=="], + + "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="], + + "cacache": ["cacache@19.0.1", "", { "dependencies": { "@npmcli/fs": "^4.0.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^10.0.1", "minipass": "^7.0.3", "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "p-map": "^7.0.2", "ssri": "^12.0.0", "tar": "^7.4.3", "unique-filename": "^4.0.0" } }, "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ=="], + + "cacheable-lookup": ["cacheable-lookup@7.0.0", "", {}, "sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w=="], + + 
"cacheable-request": ["cacheable-request@12.0.1", "", { "dependencies": { "@types/http-cache-semantics": "^4.0.4", "get-stream": "^9.0.1", "http-cache-semantics": "^4.1.1", "keyv": "^4.5.4", "mimic-response": "^4.0.0", "normalize-url": "^8.0.1", "responselike": "^3.0.0" } }, "sha512-Yo9wGIQUaAfIbk+qY0X4cDQgCosecfBe3V9NSyeY4qPC2SAkbCS4Xj79VP8WOzitpJUZKc/wsRCYF5ariDIwkg=="], + + "call-bind": ["call-bind@1.0.8", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="], + + "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="], + + "call-bound": ["call-bound@1.0.4", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "get-intrinsic": "^1.3.0" } }, "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg=="], + + "callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="], + + "camelcase": ["camelcase@6.3.0", "", {}, "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA=="], + + "caniuse-lite": ["caniuse-lite@1.0.30001721", "", {}, "sha512-cOuvmUVtKrtEaoKiO0rSc29jcjwMwX5tOHDy4MgVFEWiUXj4uBMJkwI8MDySkgXidpMiHUcviogAvFi4pA2hDQ=="], + + "chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="], + + "chardet": ["chardet@0.7.0", "", {}, "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA=="], + + "chokidar": ["chokidar@4.0.3", "", { "dependencies": 
{ "readdirp": "^4.0.1" } }, "sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="], + + "chownr": ["chownr@2.0.0", "", {}, "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ=="], + + "cli-cursor": ["cli-cursor@5.0.0", "", { "dependencies": { "restore-cursor": "^5.0.0" } }, "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw=="], + + "cli-spinners": ["cli-spinners@2.9.2", "", {}, "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg=="], + + "cli-truncate": ["cli-truncate@4.0.0", "", { "dependencies": { "slice-ansi": "^5.0.0", "string-width": "^7.0.0" } }, "sha512-nPdaFdQ0h/GEigbPClz11D0v/ZJEwxmeVZGeMo3Z5StPtUTkA9o1lD6QwoirYiSDzbcwn2XcjwmCp68W1IS4TA=="], + + "cli-width": ["cli-width@4.1.0", "", {}, "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ=="], + + "cliui": ["cliui@8.0.1", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.1", "wrap-ansi": "^7.0.0" } }, "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="], + + "cluster-key-slot": ["cluster-key-slot@1.1.2", "", {}, "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA=="], + + "color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="], + + "color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="], + + "colorette": ["colorette@2.0.20", "", {}, "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w=="], + + "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, 
"sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="], + + "commander": ["commander@14.0.0", "", {}, "sha512-2uM9rYjPvyq39NwLRqaiLtWHyDC1FvryJDa2ATTVims5YAS4PupsEQsDvP14FqhFr0P49CYDugi59xaxJlTXRA=="], + + "commondir": ["commondir@1.0.1", "", {}, "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg=="], + + "component-emitter": ["component-emitter@1.3.1", "", {}, "sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ=="], + + "compress-commons": ["compress-commons@6.0.2", "", { "dependencies": { "crc-32": "^1.2.0", "crc32-stream": "^6.0.0", "is-stream": "^2.0.1", "normalize-path": "^3.0.0", "readable-stream": "^4.0.0" } }, "sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg=="], + + "concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="], + + "connect": ["connect@3.7.0", "", { "dependencies": { "debug": "2.6.9", "finalhandler": "1.1.2", "parseurl": "~1.3.3", "utils-merge": "1.0.1" } }, "sha512-ZqRXc+tZukToSNmh5C2iWMSoV3X1YUcPbqEM4DkEG5tNQXrQUZCNVGGv3IuicnkMtPfGf3Xtp8WCXs295iQ1pQ=="], + + "content-type": ["content-type@1.0.5", "", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="], + + "convert-source-map": ["convert-source-map@1.9.0", "", {}, "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="], + + "cookie": ["cookie@0.7.2", "", {}, "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="], + + "cookiejar": ["cookiejar@2.1.4", "", {}, "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw=="], + + "core-util-is": ["core-util-is@1.0.3", "", {}, 
"sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="], + + "cors": ["cors@2.8.5", "", { "dependencies": { "object-assign": "^4", "vary": "^1" } }, "sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g=="], + + "cpu-features": ["cpu-features@0.0.10", "", { "dependencies": { "buildcheck": "~0.0.6", "nan": "^2.19.0" } }, "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA=="], + + "crc-32": ["crc-32@1.2.2", "", { "bin": { "crc32": "bin/crc32.njs" } }, "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ=="], + + "crc32-stream": ["crc32-stream@6.0.0", "", { "dependencies": { "crc-32": "^1.2.0", "readable-stream": "^4.0.0" } }, "sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g=="], + + "cron-parser": ["cron-parser@4.9.0", "", { "dependencies": { "luxon": "^3.2.1" } }, "sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q=="], + + "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="], + + "css-select": ["css-select@5.1.0", "", { "dependencies": { "boolbase": "^1.0.0", "css-what": "^6.1.0", "domhandler": "^5.0.2", "domutils": "^3.0.1", "nth-check": "^2.0.1" } }, "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg=="], + + "css-what": ["css-what@6.1.0", "", {}, "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw=="], + + "custom-event": ["custom-event@1.0.1", "", {}, "sha512-GAj5FOq0Hd+RsCGVJxZuKaIDXDf3h6GQoNEjFgbLLI/trgtavwUbSnZ5pVfg27DVCaWjIohryS0JFwIJyT2cMg=="], + + "date-fns": ["date-fns@2.30.0", "", { "dependencies": { "@babel/runtime": "^7.21.0" } }, 
"sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw=="], + + "date-format": ["date-format@4.0.14", "", {}, "sha512-39BOQLs9ZjKh0/patS9nrT8wc3ioX3/eA/zgbKNopnF2wCqJEoxywwwElATYvRsXdnOxA/OQeQoFZ3rFjVajhg=="], + + "dateformat": ["dateformat@4.6.3", "", {}, "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA=="], + + "debug": ["debug@4.4.1", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ=="], + + "decompress-response": ["decompress-response@6.0.0", "", { "dependencies": { "mimic-response": "^3.1.0" } }, "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ=="], + + "deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="], + + "defer-to-connect": ["defer-to-connect@2.0.1", "", {}, "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg=="], + + "define-data-property": ["define-data-property@1.1.4", "", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.0.1" } }, "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A=="], + + "delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="], + + "denque": ["denque@2.1.0", "", {}, "sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw=="], + + "depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="], + + "destroy": ["destroy@1.2.0", "", {}, "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="], + + "detect-libc": ["detect-libc@2.0.4", "", {}, 
"sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA=="], + + "dezalgo": ["dezalgo@1.0.4", "", { "dependencies": { "asap": "^2.0.0", "wrappy": "1" } }, "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig=="], + + "di": ["di@0.0.1", "", {}, "sha512-uJaamHkagcZtHPqCIHZxnFrXlunQXgBOsZSUOWwFw31QJCAbyTBoHMW75YOTur5ZNx8pIeAKgf6GWIgaqqiLhA=="], + + "dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="], + + "discontinuous-range": ["discontinuous-range@1.0.0", "", {}, "sha512-c68LpLbO+7kP/b1Hr1qs8/BJ09F5khZGTxqxZuhzxpmwJKOgRFHJWIb9/KmqnqHhLdO55aOxFH/EGBvUQbL/RQ=="], + + "docker-compose": ["docker-compose@0.24.8", "", { "dependencies": { "yaml": "^2.2.2" } }, "sha512-plizRs/Vf15H+GCVxq2EUvyPK7ei9b/cVesHvjnX4xaXjM9spHe2Ytq0BitndFgvTJ3E3NljPNUEl7BAN43iZw=="], + + "docker-modem": ["docker-modem@5.0.6", "", { "dependencies": { "debug": "^4.1.1", "readable-stream": "^3.5.0", "split-ca": "^1.0.1", "ssh2": "^1.15.0" } }, "sha512-ens7BiayssQz/uAxGzH8zGXCtiV24rRWXdjNha5V4zSOcxmAZsfGVm/PPFbwQdqEkDnhG+SyR9E3zSHUbOKXBQ=="], + + "dockerode": ["dockerode@4.0.7", "", { "dependencies": { "@balena/dockerignore": "^1.0.2", "@grpc/grpc-js": "^1.11.1", "@grpc/proto-loader": "^0.7.13", "docker-modem": "^5.0.6", "protobufjs": "^7.3.2", "tar-fs": "~2.1.2", "uuid": "^10.0.0" } }, "sha512-R+rgrSRTRdU5mH14PZTCPZtW/zw3HDWNTS/1ZAQpL/5Upe/ye5K9WQkIysu4wBoiMwKynsz0a8qWuGsHgEvSAA=="], + + "doctrine": ["doctrine@3.0.0", "", { "dependencies": { "esutils": "^2.0.2" } }, "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w=="], + + "dom-serialize": ["dom-serialize@2.2.1", "", { "dependencies": { "custom-event": "~1.0.0", "ent": "~2.2.0", "extend": "^3.0.0", "void-elements": "^2.0.0" } }, 
"sha512-Yra4DbvoW7/Z6LBN560ZwXMjoNOSAN2wRsKFGc4iBeso+mpIA6qj1vfdf9HpMaKAqG6wXTy+1SYEzmNpKXOSsQ=="], + + "dom-serializer": ["dom-serializer@2.0.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.2", "entities": "^4.2.0" } }, "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg=="], + + "domelementtype": ["domelementtype@2.3.0", "", {}, "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw=="], + + "domhandler": ["domhandler@5.0.3", "", { "dependencies": { "domelementtype": "^2.3.0" } }, "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w=="], + + "domutils": ["domutils@3.2.2", "", { "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3" } }, "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw=="], + + "dotenv": ["dotenv@16.5.0", "", {}, "sha512-m/C+AwOAr9/W1UOIZUo232ejMNnJAJtYQjUbHoNTBNTJSvqzzDh7vnrei3o3r3m9blf6ZoDkvcw0VmozNRFJxg=="], + + "dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="], + + "eastasianwidth": ["eastasianwidth@0.2.0", "", {}, "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="], + + "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="], + + "electron-to-chromium": ["electron-to-chromium@1.5.166", "", {}, "sha512-QPWqHL0BglzPYyJJ1zSSmwFFL6MFXhbACOCcsCdUMCkzPdS9/OIBVxg516X/Ado2qwAq8k0nJJ7phQPCqiaFAw=="], + + "emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="], + + "encodeurl": ["encodeurl@1.0.2", "", {}, 
"sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="], + + "encoding": ["encoding@0.1.13", "", { "dependencies": { "iconv-lite": "^0.6.2" } }, "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A=="], + + "end-of-stream": ["end-of-stream@1.4.4", "", { "dependencies": { "once": "^1.4.0" } }, "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q=="], + + "engine.io": ["engine.io@6.6.4", "", { "dependencies": { "@types/cors": "^2.8.12", "@types/node": ">=10.0.0", "accepts": "~1.3.4", "base64id": "2.0.0", "cookie": "~0.7.2", "cors": "~2.8.5", "debug": "~4.3.1", "engine.io-parser": "~5.2.1", "ws": "~8.17.1" } }, "sha512-ZCkIjSYNDyGn0R6ewHDtXgns/Zre/NT6Agvq1/WobF7JXgFff4SeDroKiCO3fNJreU9YG429Sc81o4w5ok/W5g=="], + + "engine.io-parser": ["engine.io-parser@5.2.3", "", {}, "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q=="], + + "enhanced-resolve": ["enhanced-resolve@5.18.1", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg=="], + + "ent": ["ent@2.2.2", "", { "dependencies": { "call-bound": "^1.0.3", "es-errors": "^1.3.0", "punycode": "^1.4.1", "safe-regex-test": "^1.1.0" } }, "sha512-kKvD1tO6BM+oK9HzCPpUdRb4vKFQY/FPTFmurMvh6LlN68VMrdj77w8yp51/kDbpkFOS9J8w5W6zIzgM2H8/hw=="], + + "entities": ["entities@6.0.1", "", {}, "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g=="], + + "env-paths": ["env-paths@2.2.1", "", {}, "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A=="], + + "environment": ["environment@1.1.0", "", {}, "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q=="], + + "err-code": ["err-code@2.0.3", "", {}, 
"sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA=="], + + "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="], + + "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="], + + "es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="], + + "es-set-tostringtag": ["es-set-tostringtag@2.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA=="], + + "esbuild": ["esbuild@0.25.5", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.5", "@esbuild/android-arm": "0.25.5", "@esbuild/android-arm64": "0.25.5", "@esbuild/android-x64": "0.25.5", "@esbuild/darwin-arm64": "0.25.5", "@esbuild/darwin-x64": "0.25.5", "@esbuild/freebsd-arm64": "0.25.5", "@esbuild/freebsd-x64": "0.25.5", "@esbuild/linux-arm": "0.25.5", "@esbuild/linux-arm64": "0.25.5", "@esbuild/linux-ia32": "0.25.5", "@esbuild/linux-loong64": "0.25.5", "@esbuild/linux-mips64el": "0.25.5", "@esbuild/linux-ppc64": "0.25.5", "@esbuild/linux-riscv64": "0.25.5", "@esbuild/linux-s390x": "0.25.5", "@esbuild/linux-x64": "0.25.5", "@esbuild/netbsd-arm64": "0.25.5", "@esbuild/netbsd-x64": "0.25.5", "@esbuild/openbsd-arm64": "0.25.5", "@esbuild/openbsd-x64": "0.25.5", "@esbuild/sunos-x64": "0.25.5", "@esbuild/win32-arm64": "0.25.5", "@esbuild/win32-ia32": "0.25.5", "@esbuild/win32-x64": "0.25.5" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-P8OtKZRv/5J5hhz0cUAdu/cLuPIKXpQl1R9pZtvmHWQvrAUVd0UNIPT4IB4W3rNOqVO0rlqHmCIbSwxh/c9yUQ=="], + + "escalade": ["escalade@3.2.0", "", {}, 
"sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="], + + "escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="], + + "escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="], + + "eslint": ["eslint@8.57.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", "@eslint/js": "8.57.1", "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", "eslint-scope": "^7.2.2", "eslint-visitor-keys": "^3.4.3", "espree": "^9.6.1", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "globals": "^13.19.0", "graphemer": "^1.4.0", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3", "strip-ansi": "^6.0.1", "text-table": "^0.2.0" }, "bin": { "eslint": "bin/eslint.js" } }, "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA=="], + + "eslint-scope": ["eslint-scope@7.2.2", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg=="], + + "eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, 
"sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="], + + "espree": ["espree@9.6.1", "", { "dependencies": { "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", "eslint-visitor-keys": "^3.4.1" } }, "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ=="], + + "esquery": ["esquery@1.6.0", "", { "dependencies": { "estraverse": "^5.1.0" } }, "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg=="], + + "esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="], + + "estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="], + + "esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="], + + "event-target-shim": ["event-target-shim@5.0.1", "", {}, "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ=="], + + "eventemitter3": ["eventemitter3@5.0.1", "", {}, "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA=="], + + "events": ["events@3.3.0", "", {}, "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q=="], + + "exponential-backoff": ["exponential-backoff@3.1.2", "", {}, "sha512-8QxYTVXUkuy7fIIoitQkPwGonB8F3Zj8eEO8Sqg9Zv/bkI7RJAzowee4gr81Hak/dUTpA2Z7VfQgoijjPNlUZA=="], + + "extend": ["extend@3.0.2", "", {}, "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="], + + "external-editor": ["external-editor@3.1.0", "", { "dependencies": { "chardet": "^0.7.0", "iconv-lite": "^0.4.24", "tmp": "^0.0.33" } }, "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew=="], + + 
"fast-copy": ["fast-copy@3.0.2", "", {}, "sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ=="], + + "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="], + + "fast-fifo": ["fast-fifo@1.3.2", "", {}, "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ=="], + + "fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="], + + "fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="], + + "fast-levenshtein": ["fast-levenshtein@2.0.6", "", {}, "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="], + + "fast-redact": ["fast-redact@3.5.0", "", {}, "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A=="], + + "fast-safe-stringify": ["fast-safe-stringify@2.1.1", "", {}, "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA=="], + + "fast-uri": ["fast-uri@3.0.6", "", {}, "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw=="], + + "fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="], + + "fdir": ["fdir@6.4.5", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-4BG7puHpVsIYxZUbiUE3RqGloLaSSwzYie5jvasC4LWuBWzZawynvYouhjbQKw2JuIGYdm0DzIxl8iVidKlUEw=="], + + "file-entry-cache": ["file-entry-cache@6.0.1", "", { "dependencies": 
{ "flat-cache": "^3.0.4" } }, "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg=="], + + "fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="], + + "finalhandler": ["finalhandler@1.1.2", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "on-finished": "~2.3.0", "parseurl": "~1.3.3", "statuses": "~1.5.0", "unpipe": "~1.0.0" } }, "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA=="], + + "find-cache-dir": ["find-cache-dir@3.3.2", "", { "dependencies": { "commondir": "^1.0.1", "make-dir": "^3.0.2", "pkg-dir": "^4.1.0" } }, "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig=="], + + "find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="], + + "flat-cache": ["flat-cache@3.2.0", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.3", "rimraf": "^3.0.2" } }, "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw=="], + + "flatted": ["flatted@3.3.3", "", {}, "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg=="], + + "follow-redirects": ["follow-redirects@1.15.9", "", {}, "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ=="], + + "foreground-child": ["foreground-child@3.3.1", "", { "dependencies": { "cross-spawn": "^7.0.6", "signal-exit": "^4.0.1" } }, "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw=="], + + "form-data": ["form-data@4.0.3", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": 
"^2.1.0", "hasown": "^2.0.2", "mime-types": "^2.1.12" } }, "sha512-qsITQPfmvMOSAdeyZ+12I1c+CKSstAFAwu+97zrnWAbIr5u8wfsExUzCesVLC8NgHuRUqNN4Zy6UPWUTRGslcA=="], + + "form-data-encoder": ["form-data-encoder@4.1.0", "", {}, "sha512-G6NsmEW15s0Uw9XnCg+33H3ViYRyiM0hMrMhhqQOR8NFc5GhYrI+6I3u7OTw7b91J2g8rtvMBZJDbcGb2YUniw=="], + + "formidable": ["formidable@2.1.5", "", { "dependencies": { "@paralleldrive/cuid2": "^2.2.2", "dezalgo": "^1.0.4", "once": "^1.4.0", "qs": "^6.11.0" } }, "sha512-Oz5Hwvwak/DCaXVVUtPn4oLMLLy1CdclLKO1LFgU7XzDpVMUU5UjlSLpGMocyQNNk8F6IJW9M/YdooSn2MRI+Q=="], + + "fraction.js": ["fraction.js@4.3.7", "", {}, "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew=="], + + "fs-constants": ["fs-constants@1.0.0", "", {}, "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="], + + "fs-extra": ["fs-extra@8.1.0", "", { "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^4.0.0", "universalify": "^0.1.0" } }, "sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g=="], + + "fs-minipass": ["fs-minipass@3.0.3", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw=="], + + "fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="], + + "fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], + + "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="], + + "functional-red-black-tree": ["functional-red-black-tree@1.0.1", "", {}, "sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g=="], + + "gensync": ["gensync@1.0.0-beta.2", "", {}, 
"sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg=="], + + "get-caller-file": ["get-caller-file@2.0.5", "", {}, "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="], + + "get-east-asian-width": ["get-east-asian-width@1.3.0", "", {}, "sha512-vpeMIQKxczTD/0s2CdEWHcb0eeJe6TFjxb+J5xgX7hScxqrGuyjmv4c1D4A/gelKfyox0gJJwIHF+fLjeaM8kQ=="], + + "get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="], + + "get-port": ["get-port@7.1.0", "", {}, "sha512-QB9NKEeDg3xxVwCCwJQ9+xycaz6pBB6iQ76wiWMl1927n0Kir6alPiP+yuiICLLU4jpMe08dXfpebuQppFA2zw=="], + + "get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="], + + "get-stream": ["get-stream@9.0.1", "", { "dependencies": { "@sec-ant/readable-stream": "^0.4.1", "is-stream": "^4.0.1" } }, "sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA=="], + + "glob": ["glob@7.2.3", "", { "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="], + + "glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="], + + "glob-to-regexp": ["glob-to-regexp@0.4.1", "", {}, 
"sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw=="], + + "globals": ["globals@13.24.0", "", { "dependencies": { "type-fest": "^0.20.2" } }, "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ=="], + + "globby": ["globby@11.1.0", "", { "dependencies": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", "fast-glob": "^3.2.9", "ignore": "^5.2.0", "merge2": "^1.4.1", "slash": "^3.0.0" } }, "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g=="], + + "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="], + + "got": ["got@14.4.7", "", { "dependencies": { "@sindresorhus/is": "^7.0.1", "@szmarczak/http-timer": "^5.0.1", "cacheable-lookup": "^7.0.0", "cacheable-request": "^12.0.1", "decompress-response": "^6.0.0", "form-data-encoder": "^4.0.2", "http2-wrapper": "^2.2.1", "lowercase-keys": "^3.0.0", "p-cancelable": "^4.0.1", "responselike": "^3.0.0", "type-fest": "^4.26.1" } }, "sha512-DI8zV1231tqiGzOiOzQWDhsBmncFW7oQDH6Zgy6pDPrqJuVZMtoSgPLLsBZQj8Jg4JFfwoOsDA8NGtLQLnIx2g=="], + + "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="], + + "graphemer": ["graphemer@1.4.0", "", {}, "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="], + + "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="], + + "has-property-descriptors": ["has-property-descriptors@1.0.2", "", { "dependencies": { "es-define-property": "^1.0.0" } }, "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg=="], + + "has-symbols": ["has-symbols@1.1.0", "", {}, 
"sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="], + + "has-tostringtag": ["has-tostringtag@1.0.2", "", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="], + + "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="], + + "help-me": ["help-me@5.0.0", "", {}, "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg=="], + + "hono": ["hono@4.7.11", "", {}, "sha512-rv0JMwC0KALbbmwJDEnxvQCeJh+xbS3KEWW5PC9cMJ08Ur9xgatI0HmtgYZfOdOSOeYsp5LO2cOhdI8cLEbDEQ=="], + + "hosted-git-info": ["hosted-git-info@8.1.0", "", { "dependencies": { "lru-cache": "^10.0.1" } }, "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw=="], + + "html-escaper": ["html-escaper@2.0.2", "", {}, "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg=="], + + "htmlparser2": ["htmlparser2@10.0.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.2.1", "entities": "^6.0.0" } }, "sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g=="], + + "http-cache-semantics": ["http-cache-semantics@4.2.0", "", {}, "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ=="], + + "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="], + + "http-proxy": ["http-proxy@1.18.1", "", { "dependencies": { "eventemitter3": "^4.0.0", "follow-redirects": "^1.0.0", "requires-port": "^1.0.0" } }, 
"sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ=="], + + "http-proxy-agent": ["http-proxy-agent@7.0.2", "", { "dependencies": { "agent-base": "^7.1.0", "debug": "^4.3.4" } }, "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="], + + "http2-wrapper": ["http2-wrapper@2.2.1", "", { "dependencies": { "quick-lru": "^5.1.1", "resolve-alpn": "^1.2.0" } }, "sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ=="], + + "https-proxy-agent": ["https-proxy-agent@7.0.6", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "4" } }, "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="], + + "iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="], + + "ieee754": ["ieee754@1.2.1", "", {}, "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="], + + "ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="], + + "ignore-walk": ["ignore-walk@7.0.0", "", { "dependencies": { "minimatch": "^9.0.0" } }, "sha512-T4gbf83A4NH95zvhVYZc+qWocBBGlpzUXLPGurJggw/WIOwicfXJChLDP/iBZnN5WqROSu5Bm3hhle4z8a8YGQ=="], + + "immutable": ["immutable@4.3.7", "", {}, "sha512-1hqclzwYwjRDFLjcFxOM5AYkkG0rpFPpr1RLPMEuGczoS7YA8gLhy8SWXYRAA/XwfEHpfo3cw5JGioS32fnMRw=="], + + "import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="], + + "imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="], + + "inflight": ["inflight@1.0.6", "", { 
"dependencies": { "once": "^1.3.0", "wrappy": "1" } }, "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA=="], + + "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="], + + "ini": ["ini@5.0.0", "", {}, "sha512-+N0ngpO3e7cRUWOJAS7qw0IZIVc6XPrW4MlFBdD066F2L4k1L6ker3hLqSq7iXxU5tgS4WGkIUElWn5vogAEnw=="], + + "ioredis": ["ioredis@5.6.1", "", { "dependencies": { "@ioredis/commands": "^1.1.1", "cluster-key-slot": "^1.1.0", "debug": "^4.3.4", "denque": "^2.1.0", "lodash.defaults": "^4.2.0", "lodash.isarguments": "^3.1.0", "redis-errors": "^1.2.0", "redis-parser": "^3.0.0", "standard-as-callback": "^2.1.0" } }, "sha512-UxC0Yv1Y4WRJiGQxQkP0hfdL0/5/6YvdfOOClRgJ0qppSarkhneSa6UvkMkms0AkdGimSH3Ikqm+6mkMmX7vGA=="], + + "ip-address": ["ip-address@9.0.5", "", { "dependencies": { "jsbn": "1.1.0", "sprintf-js": "^1.1.3" } }, "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g=="], + + "is-binary-path": ["is-binary-path@2.1.0", "", { "dependencies": { "binary-extensions": "^2.0.0" } }, "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw=="], + + "is-core-module": ["is-core-module@2.16.1", "", { "dependencies": { "hasown": "^2.0.2" } }, "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w=="], + + "is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="], + + "is-fullwidth-code-point": ["is-fullwidth-code-point@3.0.0", "", {}, "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="], + + "is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="], + + "is-interactive": 
["is-interactive@2.0.0", "", {}, "sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ=="], + + "is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="], + + "is-path-inside": ["is-path-inside@3.0.3", "", {}, "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ=="], + + "is-regex": ["is-regex@1.2.1", "", { "dependencies": { "call-bound": "^1.0.2", "gopd": "^1.2.0", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g=="], + + "is-stream": ["is-stream@4.0.1", "", {}, "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A=="], + + "is-unicode-supported": ["is-unicode-supported@2.1.0", "", {}, "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ=="], + + "isarray": ["isarray@2.0.5", "", {}, "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw=="], + + "isbinaryfile": ["isbinaryfile@4.0.10", "", {}, "sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw=="], + + "isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="], + + "istanbul-lib-coverage": ["istanbul-lib-coverage@3.2.2", "", {}, "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg=="], + + "istanbul-lib-instrument": ["istanbul-lib-instrument@6.0.3", "", { "dependencies": { "@babel/core": "^7.23.9", "@babel/parser": "^7.23.9", "@istanbuljs/schema": "^0.1.3", "istanbul-lib-coverage": "^3.2.0", "semver": "^7.5.4" } }, "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q=="], + + "istanbul-lib-report": ["istanbul-lib-report@3.0.1", "", { 
"dependencies": { "istanbul-lib-coverage": "^3.0.0", "make-dir": "^4.0.0", "supports-color": "^7.1.0" } }, "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw=="], + + "istanbul-lib-source-maps": ["istanbul-lib-source-maps@4.0.1", "", { "dependencies": { "debug": "^4.1.1", "istanbul-lib-coverage": "^3.0.0", "source-map": "^0.6.1" } }, "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw=="], + + "istanbul-reports": ["istanbul-reports@3.1.7", "", { "dependencies": { "html-escaper": "^2.0.0", "istanbul-lib-report": "^3.0.0" } }, "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g=="], + + "jackspeak": ["jackspeak@3.4.3", "", { "dependencies": { "@isaacs/cliui": "^8.0.2" }, "optionalDependencies": { "@pkgjs/parseargs": "^0.11.0" } }, "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="], + + "jasmine-core": ["jasmine-core@5.7.1", "", {}, "sha512-QnurrtpKsPoixxG2R3d1xP0St/2kcX5oTZyDyQJMY+Vzi/HUlu1kGm+2V8Tz+9lV991leB1l0xcsyz40s9xOOw=="], + + "jiti": ["jiti@2.4.2", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A=="], + + "joycon": ["joycon@3.1.1", "", {}, "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw=="], + + "js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="], + + "js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="], + + "jsbn": ["jsbn@1.1.0", "", {}, "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A=="], + + "jsesc": ["jsesc@3.1.0", "", { "bin": { "jsesc": 
"bin/jsesc" } }, "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA=="], + + "json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="], + + "json-parse-even-better-errors": ["json-parse-even-better-errors@4.0.0", "", {}, "sha512-lR4MXjGNgkJc7tkQ97kb2nuEMnNCyU//XYVH0MKTGcXEiSudQ5MKGKen3C5QubYy0vmq+JGitUg92uuywGEwIA=="], + + "json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="], + + "json-stable-stringify": ["json-stable-stringify@1.3.0", "", { "dependencies": { "call-bind": "^1.0.8", "call-bound": "^1.0.4", "isarray": "^2.0.5", "jsonify": "^0.0.1", "object-keys": "^1.1.1" } }, "sha512-qtYiSSFlwot9XHtF9bD9c7rwKjr+RecWT//ZnPvSmEjpV5mmPOCN4j8UjY5hbjNkOwZ/jQv3J6R1/pL7RwgMsg=="], + + "json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="], + + "json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="], + + "jsonc-parser": ["jsonc-parser@3.3.1", "", {}, "sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ=="], + + "jsonfile": ["jsonfile@4.0.0", "", { "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg=="], + + "jsonify": ["jsonify@0.0.1", "", {}, "sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg=="], + + "jsonparse": ["jsonparse@1.3.1", "", {}, "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg=="], + + "karma": ["karma@6.4.4", "", { "dependencies": { "@colors/colors": 
"1.5.0", "body-parser": "^1.19.0", "braces": "^3.0.2", "chokidar": "^3.5.1", "connect": "^3.7.0", "di": "^0.0.1", "dom-serialize": "^2.2.1", "glob": "^7.1.7", "graceful-fs": "^4.2.6", "http-proxy": "^1.18.1", "isbinaryfile": "^4.0.8", "lodash": "^4.17.21", "log4js": "^6.4.1", "mime": "^2.5.2", "minimatch": "^3.0.4", "mkdirp": "^0.5.5", "qjobs": "^1.2.0", "range-parser": "^1.2.1", "rimraf": "^3.0.2", "socket.io": "^4.7.2", "source-map": "^0.6.1", "tmp": "^0.2.1", "ua-parser-js": "^0.7.30", "yargs": "^16.1.1" }, "bin": { "karma": "bin/karma" } }, "sha512-LrtUxbdvt1gOpo3gxG+VAJlJAEMhbWlM4YrFQgql98FwF7+K8K12LYO4hnDdUkNjeztYrOXEMqgTajSWgmtI/w=="], + + "karma-chrome-launcher": ["karma-chrome-launcher@3.2.0", "", { "dependencies": { "which": "^1.2.1" } }, "sha512-rE9RkUPI7I9mAxByQWkGJFXfFD6lE4gC5nPuZdobf/QdTEJI6EU4yIay/cfU/xV4ZxlM5JiTv7zWYgA64NpS5Q=="], + + "karma-coverage": ["karma-coverage@2.2.1", "", { "dependencies": { "istanbul-lib-coverage": "^3.2.0", "istanbul-lib-instrument": "^5.1.0", "istanbul-lib-report": "^3.0.0", "istanbul-lib-source-maps": "^4.0.1", "istanbul-reports": "^3.0.5", "minimatch": "^3.0.4" } }, "sha512-yj7hbequkQP2qOSb20GuNSIyE//PgJWHwC2IydLE6XRtsnaflv+/OSGNssPjobYUlhVVagy99TQpqUt3vAUG7A=="], + + "karma-jasmine": ["karma-jasmine@5.1.0", "", { "dependencies": { "jasmine-core": "^4.1.0" }, "peerDependencies": { "karma": "^6.0.0" } }, "sha512-i/zQLFrfEpRyQoJF9fsCdTMOF5c2dK7C7OmsuKg2D0YSsuZSfQDiLuaiktbuio6F2wiCsZSnSnieIQ0ant/uzQ=="], + + "karma-jasmine-html-reporter": ["karma-jasmine-html-reporter@2.1.0", "", { "peerDependencies": { "jasmine-core": "^4.0.0 || ^5.0.0", "karma": "^6.0.0", "karma-jasmine": "^5.0.0" } }, "sha512-sPQE1+nlsn6Hwb5t+HHwyy0A1FNCVKuL1192b+XNauMYWThz2kweiBVW1DqloRpVvZIJkIoHVB7XRpK78n1xbQ=="], + + "keyv": ["keyv@4.5.4", "", { "dependencies": { "json-buffer": "3.0.1" } }, "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="], + + "lazystream": ["lazystream@1.0.1", "", { "dependencies": 
{ "readable-stream": "^2.0.5" } }, "sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw=="], + + "levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="], + + "lightningcss": ["lightningcss@1.30.1", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-darwin-arm64": "1.30.1", "lightningcss-darwin-x64": "1.30.1", "lightningcss-freebsd-x64": "1.30.1", "lightningcss-linux-arm-gnueabihf": "1.30.1", "lightningcss-linux-arm64-gnu": "1.30.1", "lightningcss-linux-arm64-musl": "1.30.1", "lightningcss-linux-x64-gnu": "1.30.1", "lightningcss-linux-x64-musl": "1.30.1", "lightningcss-win32-arm64-msvc": "1.30.1", "lightningcss-win32-x64-msvc": "1.30.1" } }, "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg=="], + + "lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ=="], + + "lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-k1EvjakfumAQoTfcXUcHQZhSpLlkAuEkdMBsI/ivWw9hL+7FtilQc0Cy3hrx0AAQrVtQAbMI7YjCgYgvn37PzA=="], + + "lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.1", "", { "os": "freebsd", "cpu": "x64" }, "sha512-kmW6UGCGg2PcyUE59K5r0kWfKPAVy4SltVeut+umLCFoJ53RdCUWxcRDzO1eTaxf/7Q2H7LTquFHPL5R+Gjyig=="], + + "lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.1", "", { "os": "linux", "cpu": "arm" }, "sha512-MjxUShl1v8pit+6D/zSPq9S9dQ2NPFSQwGvxBCYaBYLPlCWuPh9/t1MRS8iUaR8i+a6w7aps+B4N0S1TYP/R+Q=="], + + "lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.1", "", { "os": "linux", "cpu": "arm64" }, 
"sha512-gB72maP8rmrKsnKYy8XUuXi/4OctJiuQjcuqWNlJQ6jZiWqtPvqFziskH3hnajfvKB27ynbVCucKSm2rkQp4Bw=="], + + "lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-jmUQVx4331m6LIX+0wUhBbmMX7TCfjF5FoOH6SD1CttzuYlGNVpA7QnrmLxrsub43ClTINfGSYyHe2HWeLl5CQ=="], + + "lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-piWx3z4wN8J8z3+O5kO74+yr6ze/dKmPnI7vLqfSqI8bccaTGY5xiSGVIJBDd5K5BHlvVLpUB3S2YCfelyJ1bw=="], + + "lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.1", "", { "os": "linux", "cpu": "x64" }, "sha512-rRomAK7eIkL+tHY0YPxbc5Dra2gXlI63HL+v1Pdi1a3sC+tJTcFrHX+E86sulgAXeI7rSzDYhPSeHHjqFhqfeQ=="], + + "lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.1", "", { "os": "win32", "cpu": "arm64" }, "sha512-mSL4rqPi4iXq5YVqzSsJgMVFENoa4nGTT/GjO2c0Yl9OuQfPsIfncvLrEW6RbbB24WtZ3xP/2CCmI3tNkNV4oA=="], + + "lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.1", "", { "os": "win32", "cpu": "x64" }, "sha512-PVqXh48wh4T53F/1CCu8PIPCxLzWyCnn/9T5W1Jpmdy5h9Cwd+0YQS6/LwhHXSafuc61/xg9Lv5OrCby6a++jg=="], + + "listr2": ["listr2@8.3.3", "", { "dependencies": { "cli-truncate": "^4.0.0", "colorette": "^2.0.20", "eventemitter3": "^5.0.1", "log-update": "^6.1.0", "rfdc": "^1.4.1", "wrap-ansi": "^9.0.0" } }, "sha512-LWzX2KsqcB1wqQ4AHgYb4RsDXauQiqhjLk+6hjbaeHG4zpjjVAB6wC/gz6X0l+Du1cN3pUB5ZlrvTbhGSNnUQQ=="], + + "lmdb": ["lmdb@3.3.0", "", { "dependencies": { "msgpackr": "^1.11.2", "node-addon-api": "^6.1.0", "node-gyp-build-optional-packages": "5.2.2", "ordered-binary": "^1.5.3", "weak-lru-cache": "^1.2.2" }, "optionalDependencies": { "@lmdb/lmdb-darwin-arm64": "3.3.0", "@lmdb/lmdb-darwin-x64": "3.3.0", "@lmdb/lmdb-linux-arm": "3.3.0", "@lmdb/lmdb-linux-arm64": "3.3.0", "@lmdb/lmdb-linux-x64": "3.3.0", "@lmdb/lmdb-win32-arm64": "3.3.0", "@lmdb/lmdb-win32-x64": "3.3.0" }, "bin": { "download-lmdb-prebuilds": 
"bin/download-prebuilds.js" } }, "sha512-MgJocUI6QEiSXQBFWLeyo1R7eQj8Rke5dlPxX0KFwli8/bsCxpM/KbXO5y0qmV/5llQ3wpneDWcTYxa+4vn8iQ=="], + + "locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="], + + "lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="], + + "lodash.camelcase": ["lodash.camelcase@4.3.0", "", {}, "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="], + + "lodash.defaults": ["lodash.defaults@4.2.0", "", {}, "sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ=="], + + "lodash.isarguments": ["lodash.isarguments@3.1.0", "", {}, "sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg=="], + + "lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="], + + "log-symbols": ["log-symbols@6.0.0", "", { "dependencies": { "chalk": "^5.3.0", "is-unicode-supported": "^1.3.0" } }, "sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw=="], + + "log-update": ["log-update@6.1.0", "", { "dependencies": { "ansi-escapes": "^7.0.0", "cli-cursor": "^5.0.0", "slice-ansi": "^7.1.0", "strip-ansi": "^7.1.0", "wrap-ansi": "^9.0.0" } }, "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w=="], + + "log4js": ["log4js@6.9.1", "", { "dependencies": { "date-format": "^4.0.14", "debug": "^4.3.4", "flatted": "^3.2.7", "rfdc": "^1.3.0", "streamroller": "^3.1.5" } }, "sha512-1somDdy9sChrr9/f4UlzhdaGfDR2c/SaD2a4T7qEkG4jTS57/B3qmnjLYePwQ8cqWnUHZI0iAKxMBpCZICiZ2g=="], + + "long": ["long@5.3.2", "", {}, 
"sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA=="], + + "lowercase-keys": ["lowercase-keys@3.0.0", "", {}, "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ=="], + + "lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="], + + "luxon": ["luxon@3.6.1", "", {}, "sha512-tJLxrKJhO2ukZ5z0gyjY1zPh3Rh88Ej9P7jNrZiHMUXHae1yvI2imgOZtL1TO8TW6biMMKfTtAOoEJANgtWBMQ=="], + + "magic-string": ["magic-string@0.30.17", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0" } }, "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA=="], + + "make-dir": ["make-dir@4.0.0", "", { "dependencies": { "semver": "^7.5.3" } }, "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw=="], + + "make-fetch-happen": ["make-fetch-happen@14.0.3", "", { "dependencies": { "@npmcli/agent": "^3.0.0", "cacache": "^19.0.1", "http-cache-semantics": "^4.1.1", "minipass": "^7.0.2", "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^1.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "ssri": "^12.0.0" } }, "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ=="], + + "math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="], + + "media-typer": ["media-typer@0.3.0", "", {}, "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="], + + "memory-pager": ["memory-pager@1.5.0", "", {}, "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg=="], + + "merge2": ["merge2@1.4.1", "", {}, 
"sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="], + + "methods": ["methods@1.1.2", "", {}, "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="], + + "micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="], + + "mime": ["mime@2.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg=="], + + "mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="], + + "mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="], + + "mimic-function": ["mimic-function@5.0.1", "", {}, "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA=="], + + "mimic-response": ["mimic-response@4.0.0", "", {}, "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg=="], + + "minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="], + + "minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="], + + "minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="], + + "minipass-collect": ["minipass-collect@2.0.1", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw=="], + + "minipass-fetch": ["minipass-fetch@4.0.1", "", { 
"dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", "minizlib": "^3.0.1" }, "optionalDependencies": { "encoding": "^0.1.13" } }, "sha512-j7U11C5HXigVuutxebFadoYBbd7VSdZWggSe64NVdvWNBqGAiXPL2QVCehjmw7lY1oF9gOllYbORh+hiNgfPgQ=="], + + "minipass-flush": ["minipass-flush@1.0.5", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw=="], + + "minipass-pipeline": ["minipass-pipeline@1.2.4", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A=="], + + "minipass-sized": ["minipass-sized@1.0.3", "", { "dependencies": { "minipass": "^3.0.0" } }, "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g=="], + + "minizlib": ["minizlib@3.0.2", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA=="], + + "mkdirp": ["mkdirp@0.5.6", "", { "dependencies": { "minimist": "^1.2.6" }, "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw=="], + + "mkdirp-classic": ["mkdirp-classic@0.5.3", "", {}, "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="], + + "moment": ["moment@2.30.1", "", {}, "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how=="], + + "mongodb": ["mongodb@6.17.0", "", { "dependencies": { "@mongodb-js/saslprep": "^1.1.9", "bson": "^6.10.4", "mongodb-connection-string-url": "^3.0.0" }, "peerDependencies": { "@aws-sdk/credential-providers": "^3.188.0", "@mongodb-js/zstd": "^1.1.0 || ^2.0.0", "gcp-metadata": "^5.2.0", "kerberos": "^2.0.1", "mongodb-client-encryption": ">=6.0.0 <7", "snappy": "^7.2.2", "socks": "^2.7.1" }, "optionalPeers": ["@aws-sdk/credential-providers", 
"@mongodb-js/zstd", "gcp-metadata", "kerberos", "mongodb-client-encryption", "snappy", "socks"] }, "sha512-neerUzg/8U26cgruLysKEjJvoNSXhyID3RvzvdcpsIi2COYM3FS3o9nlH7fxFtefTb942dX3W9i37oPfCVj4wA=="], + + "mongodb-connection-string-url": ["mongodb-connection-string-url@3.0.2", "", { "dependencies": { "@types/whatwg-url": "^11.0.2", "whatwg-url": "^14.1.0 || ^13.0.0" } }, "sha512-rMO7CGo/9BFwyZABcKAWL8UJwH/Kc2x0g72uhDWzG48URRax5TCIcJ7Rc3RZqffZzO/Gwff/jyKwCU9TN8gehA=="], + + "mongodb-memory-server": ["mongodb-memory-server@9.5.0", "", { "dependencies": { "mongodb-memory-server-core": "9.5.0", "tslib": "^2.6.3" } }, "sha512-In3zRT40cLlVtpy7FK6b96Lby6JBAdXj8Kf9YrH4p1Aa2X4ptojq7SmiRR3x47Lo0/UCXXIwhJpkdbYY8kRZAw=="], + + "mongodb-memory-server-core": ["mongodb-memory-server-core@9.5.0", "", { "dependencies": { "async-mutex": "^0.4.1", "camelcase": "^6.3.0", "debug": "^4.3.7", "find-cache-dir": "^3.3.2", "follow-redirects": "^1.15.9", "https-proxy-agent": "^7.0.5", "mongodb": "^5.9.2", "new-find-package-json": "^2.0.0", "semver": "^7.6.3", "tar-stream": "^3.1.7", "tslib": "^2.6.3", "yauzl": "^3.1.3" } }, "sha512-Jb/V80JeYAKWaF4bPFme7SmTR6ew1PWgkpPUepLDfRraeN49i1cruxICeA4zz4T33W/o31N+zazP8wI8ebf7yw=="], + + "moo": ["moo@0.5.2", "", {}, "sha512-iSAJLHYKnX41mKcJKjqvnAN9sf0LMDTXDEvFv+ffuRR9a1MIuXLjMNL6EsnDHSkKLTWNqQQ5uo61P4EbU4NU+Q=="], + + "mrmime": ["mrmime@2.0.1", "", {}, "sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ=="], + + "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], + + "msgpackr": ["msgpackr@1.11.4", "", { "optionalDependencies": { "msgpackr-extract": "^3.0.2" } }, "sha512-uaff7RG9VIC4jacFW9xzL3jc0iM32DNHe4jYVycBcjUePT/Klnfj7pqtWJt9khvDFizmjN2TlYniYmSS2LIaZg=="], + + "msgpackr-extract": ["msgpackr-extract@3.0.3", "", { "dependencies": { "node-gyp-build-optional-packages": "5.2.2" }, "optionalDependencies": { 
"@msgpackr-extract/msgpackr-extract-darwin-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-darwin-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-arm64": "3.0.3", "@msgpackr-extract/msgpackr-extract-linux-x64": "3.0.3", "@msgpackr-extract/msgpackr-extract-win32-x64": "3.0.3" }, "bin": { "download-msgpackr-prebuilds": "bin/download-prebuilds.js" } }, "sha512-P0efT1C9jIdVRefqjzOQ9Xml57zpOXnIuS+csaB4MdZbTdmGDLo8XhzBG1N7aO11gKDDkJvBLULeFTo46wwreA=="], + + "mute-stream": ["mute-stream@2.0.0", "", {}, "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA=="], + + "nan": ["nan@2.22.2", "", {}, "sha512-DANghxFkS1plDdRsX0X9pm0Z6SJNN6gBdtXfanwoZ8hooC5gosGFSBGRYHUVPz1asKA/kMRqDRdHrluZ61SpBQ=="], + + "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="], + + "natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="], + + "nearley": ["nearley@2.20.1", "", { "dependencies": { "commander": "^2.19.0", "moo": "^0.5.0", "railroad-diagrams": "^1.0.0", "randexp": "0.4.6" }, "bin": { "nearleyc": "bin/nearleyc.js", "nearley-test": "bin/nearley-test.js", "nearley-unparse": "bin/nearley-unparse.js", "nearley-railroad": "bin/nearley-railroad.js" } }, "sha512-+Mc8UaAebFzgV+KpI5n7DasuuQCHA89dmwm7JXw3TV43ukfNQ9DnBH3Mdb2g/I4Fdxc26pwimBWvjIw0UAILSQ=="], + + "negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="], + + "new-find-package-json": ["new-find-package-json@2.0.0", "", { "dependencies": { "debug": "^4.3.4" } }, "sha512-lDcBsjBSMlj3LXH2v/FW3txlh2pYTjmbOXPYJD93HI5EwuLzI11tdHSIpUMmfq/IOsldj4Ps8M8flhm+pCK4Ew=="], + + "node-abort-controller": ["node-abort-controller@3.1.1", "", {}, 
"sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ=="], + + "node-addon-api": ["node-addon-api@6.1.0", "", {}, "sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA=="], + + "node-gyp": ["node-gyp@11.2.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "graceful-fs": "^4.2.6", "make-fetch-happen": "^14.0.3", "nopt": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "tar": "^7.4.3", "tinyglobby": "^0.2.12", "which": "^5.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-T0S1zqskVUSxcsSTkAsLc7xCycrRYmtDHadDinzocrThjyQCn5kMlEBSj6H4qDbgsIOSLmmlRIeb0lZXj+UArA=="], + + "node-gyp-build-optional-packages": ["node-gyp-build-optional-packages@5.2.2", "", { "dependencies": { "detect-libc": "^2.0.1" }, "bin": { "node-gyp-build-optional-packages": "bin.js", "node-gyp-build-optional-packages-optional": "optional.js", "node-gyp-build-optional-packages-test": "build-test.js" } }, "sha512-s+w+rBWnpTMwSFbaE0UXsRlg7hU4FjekKU4eyAih5T8nJuNZT1nNsskXpxmeqSK9UzkBl6UgRlnKc8hz8IEqOw=="], + + "node-releases": ["node-releases@2.0.19", "", {}, "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw=="], + + "nopt": ["nopt@8.1.0", "", { "dependencies": { "abbrev": "^3.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A=="], + + "normalize-path": ["normalize-path@3.0.0", "", {}, "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="], + + "normalize-range": ["normalize-range@0.1.2", "", {}, "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA=="], + + "normalize-url": ["normalize-url@8.0.2", "", {}, "sha512-Ee/R3SyN4BuynXcnTaekmaVdbDAEiNrHqjQIA37mHU8G9pf7aaAD4ZX3XjBLo6rsdcxA/gtkcNYZLt30ACgynw=="], + + "npm-bundled": ["npm-bundled@4.0.0", "", { "dependencies": { 
"npm-normalize-package-bin": "^4.0.0" } }, "sha512-IxaQZDMsqfQ2Lz37VvyyEtKLe8FsRZuysmedy/N06TU1RyVppYKXrO4xIhR0F+7ubIBox6Q7nir6fQI3ej39iA=="], + + "npm-install-checks": ["npm-install-checks@7.1.1", "", { "dependencies": { "semver": "^7.1.1" } }, "sha512-u6DCwbow5ynAX5BdiHQ9qvexme4U3qHW3MWe5NqH+NeBm0LbiH6zvGjNNew1fY+AZZUtVHbOPF3j7mJxbUzpXg=="], + + "npm-normalize-package-bin": ["npm-normalize-package-bin@4.0.0", "", {}, "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w=="], + + "npm-package-arg": ["npm-package-arg@12.0.2", "", { "dependencies": { "hosted-git-info": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "validate-npm-package-name": "^6.0.0" } }, "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA=="], + + "npm-packlist": ["npm-packlist@10.0.0", "", { "dependencies": { "ignore-walk": "^7.0.0" } }, "sha512-rht9U6nS8WOBDc53eipZNPo5qkAV4X2rhKE2Oj1DYUQ3DieXfj0mKkVmjnf3iuNdtMd8WfLdi2L6ASkD/8a+Kg=="], + + "npm-pick-manifest": ["npm-pick-manifest@10.0.0", "", { "dependencies": { "npm-install-checks": "^7.1.0", "npm-normalize-package-bin": "^4.0.0", "npm-package-arg": "^12.0.0", "semver": "^7.3.5" } }, "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ=="], + + "npm-registry-fetch": ["npm-registry-fetch@18.0.2", "", { "dependencies": { "@npmcli/redact": "^3.0.0", "jsonparse": "^1.3.1", "make-fetch-happen": "^14.0.0", "minipass": "^7.0.2", "minipass-fetch": "^4.0.0", "minizlib": "^3.0.1", "npm-package-arg": "^12.0.0", "proc-log": "^5.0.0" } }, "sha512-LeVMZBBVy+oQb5R6FDV9OlJCcWDU+al10oKpe+nsvcHnG24Z3uM3SvJYKfGJlfGjVU8v9liejCrUR/M5HO5NEQ=="], + + "nth-check": ["nth-check@2.1.1", "", { "dependencies": { "boolbase": "^1.0.0" } }, "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w=="], + + "object-assign": ["object-assign@4.1.1", "", {}, 
"sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="], + + "object-hash": ["object-hash@2.2.0", "", {}, "sha512-gScRMn0bS5fH+IuwyIFgnh9zBdo4DV+6GhygmWM9HyNJSgS0hScp1f5vjtm7oIIOiT9trXrShAkLFSc2IqKNgw=="], + + "object-inspect": ["object-inspect@1.13.4", "", {}, "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew=="], + + "object-keys": ["object-keys@1.1.1", "", {}, "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA=="], + + "on-exit-leak-free": ["on-exit-leak-free@2.1.2", "", {}, "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA=="], + + "on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="], + + "once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="], + + "onetime": ["onetime@7.0.0", "", { "dependencies": { "mimic-function": "^5.0.0" } }, "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="], + + "optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="], + + "ora": ["ora@8.2.0", "", { "dependencies": { "chalk": "^5.3.0", "cli-cursor": "^5.0.0", "cli-spinners": "^2.9.2", "is-interactive": "^2.0.0", "is-unicode-supported": "^2.0.0", "log-symbols": "^6.0.0", "stdin-discarder": "^0.2.2", "string-width": "^7.2.0", "strip-ansi": "^7.1.0" } }, "sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw=="], + + "ordered-binary": 
["ordered-binary@1.5.3", "", {}, "sha512-oGFr3T+pYdTGJ+YFEILMpS3es+GiIbs9h/XQrclBXUtd44ey7XwfsMzM31f64I1SQOawDoDr/D823kNCADI8TA=="], + + "os-tmpdir": ["os-tmpdir@1.0.2", "", {}, "sha512-D2FR03Vir7FIu45XBY20mTb+/ZSWB00sjU9jdQXt83gDrI4Ztz5Fs7/yy74g2N5SVQY4xY1qDr4rNddwYRVX0g=="], + + "p-cancelable": ["p-cancelable@4.0.1", "", {}, "sha512-wBowNApzd45EIKdO1LaU+LrMBwAcjfPaYtVzV3lmfM3gf8Z4CHZsiIqlM8TZZ8okYvh5A1cP6gTfCRQtwUpaUg=="], + + "p-limit": ["p-limit@6.2.0", "", { "dependencies": { "yocto-queue": "^1.1.1" } }, "sha512-kuUqqHNUqoIWp/c467RI4X6mmyuojY5jGutNU0wVTmEOOfcuwLqyMVoAi9MKi2Ak+5i9+nhmrK4ufZE8069kHA=="], + + "p-locate": ["p-locate@5.0.0", "", { "dependencies": { "p-limit": "^3.0.2" } }, "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw=="], + + "p-map": ["p-map@7.0.3", "", {}, "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA=="], + + "p-try": ["p-try@2.2.0", "", {}, "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ=="], + + "package-json-from-dist": ["package-json-from-dist@1.0.1", "", {}, "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw=="], + + "pacote": ["pacote@21.0.0", "", { "dependencies": { "@npmcli/git": "^6.0.0", "@npmcli/installed-package-contents": "^3.0.0", "@npmcli/package-json": "^6.0.0", "@npmcli/promise-spawn": "^8.0.0", "@npmcli/run-script": "^9.0.0", "cacache": "^19.0.0", "fs-minipass": "^3.0.0", "minipass": "^7.0.2", "npm-package-arg": "^12.0.0", "npm-packlist": "^10.0.0", "npm-pick-manifest": "^10.0.0", "npm-registry-fetch": "^18.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "sigstore": "^3.0.0", "ssri": "^12.0.0", "tar": "^6.1.11" }, "bin": { "pacote": "bin/index.js" } }, "sha512-lcqexq73AMv6QNLo7SOpz0JJoaGdS3rBFgF122NZVl1bApo2mfu+XzUBU/X/XsiJu+iUmKpekRayqQYAs+PhkA=="], + + "parent-module": ["parent-module@1.0.1", "", { "dependencies": { "callsites": 
"^3.0.0" } }, "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="], + + "parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw=="], + + "parse5-html-rewriting-stream": ["parse5-html-rewriting-stream@7.1.0", "", { "dependencies": { "entities": "^6.0.0", "parse5": "^7.0.0", "parse5-sax-parser": "^7.0.0" } }, "sha512-2ifK6Jb+ONoqOy5f+cYHsqvx1obHQdvIk13Jmt/5ezxP0U9p+fqd+R6O73KblGswyuzBYfetmsfK9ThMgnuPPg=="], + + "parse5-sax-parser": ["parse5-sax-parser@7.0.0", "", { "dependencies": { "parse5": "^7.0.0" } }, "sha512-5A+v2SNsq8T6/mG3ahcz8ZtQ0OUFTatxPbeidoMB7tkJSGDY3tdfl4MHovtLQHkEn5CGxijNWRQHhRQ6IRpXKg=="], + + "parseurl": ["parseurl@1.3.3", "", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="], + + "path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="], + + "path-is-absolute": ["path-is-absolute@1.0.1", "", {}, "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="], + + "path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="], + + "path-parse": ["path-parse@1.0.7", "", {}, "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="], + + "path-scurry": ["path-scurry@1.11.1", "", { "dependencies": { "lru-cache": "^10.2.0", "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" } }, "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="], + + "path-type": ["path-type@4.0.0", "", {}, "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw=="], + + "pend": ["pend@1.2.0", "", {}, 
"sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg=="], + + "pg": ["pg@8.16.0", "", { "dependencies": { "pg-connection-string": "^2.9.0", "pg-pool": "^3.10.0", "pg-protocol": "^1.10.0", "pg-types": "2.2.0", "pgpass": "1.0.5" }, "optionalDependencies": { "pg-cloudflare": "^1.2.5" }, "peerDependencies": { "pg-native": ">=3.0.1" }, "optionalPeers": ["pg-native"] }, "sha512-7SKfdvP8CTNXjMUzfcVTaI+TDzBEeaUnVwiVGZQD1Hh33Kpev7liQba9uLd4CfN8r9mCVsD0JIpq03+Unpz+kg=="], + + "pg-cloudflare": ["pg-cloudflare@1.2.5", "", {}, "sha512-OOX22Vt0vOSRrdoUPKJ8Wi2OpE/o/h9T8X1s4qSkCedbNah9ei2W2765be8iMVxQUsvgT7zIAT2eIa9fs5+vtg=="], + + "pg-connection-string": ["pg-connection-string@2.9.0", "", {}, "sha512-P2DEBKuvh5RClafLngkAuGe9OUlFV7ebu8w1kmaaOgPcpJd1RIFh7otETfI6hAR8YupOLFTY7nuvvIn7PLciUQ=="], + + "pg-int8": ["pg-int8@1.0.1", "", {}, "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw=="], + + "pg-mem": ["pg-mem@2.9.1", "", { "dependencies": { "functional-red-black-tree": "^1.0.1", "immutable": "^4.3.4", "json-stable-stringify": "^1.0.1", "lru-cache": "^6.0.0", "moment": "^2.27.0", "object-hash": "^2.0.3", "pgsql-ast-parser": "^12.0.1" }, "peerDependencies": { "@mikro-orm/core": ">=4.5.3", "@mikro-orm/postgresql": ">=4.5.3", "knex": ">=0.20", "kysely": ">=0.26", "pg-promise": ">=10.8.7", "slonik": ">=23.0.1", "typeorm": ">=0.2.29" }, "optionalPeers": ["@mikro-orm/core", "@mikro-orm/postgresql", "knex", "kysely", "pg-promise", "slonik", "typeorm"] }, "sha512-OYq8vde7qwvAWGCEtIjkBu6zScGYD8hp3ldDIzVgQa1vtuU8ymWww/4fvcgLuFMmDl0r3NX+ZOCw254+/cLdAA=="], + + "pg-pool": ["pg-pool@3.10.0", "", { "peerDependencies": { "pg": ">=8.0" } }, "sha512-DzZ26On4sQ0KmqnO34muPcmKbhrjmyiO4lCCR0VwEd7MjmiKf5NTg/6+apUEu0NF7ESa37CGzFxH513CoUmWnA=="], + + "pg-protocol": ["pg-protocol@1.10.0", "", {}, "sha512-IpdytjudNuLv8nhlHs/UrVBhU0e78J0oIS/0AVdTbWxSOkFUVdsHC/NrorO6nXsQNDTT1kzDSOMJubBQviX18Q=="], + + "pg-types": 
["pg-types@2.2.0", "", { "dependencies": { "pg-int8": "1.0.1", "postgres-array": "~2.0.0", "postgres-bytea": "~1.0.0", "postgres-date": "~1.0.4", "postgres-interval": "^1.1.0" } }, "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA=="], + + "pgpass": ["pgpass@1.0.5", "", { "dependencies": { "split2": "^4.1.0" } }, "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug=="], + + "pgsql-ast-parser": ["pgsql-ast-parser@12.0.1", "", { "dependencies": { "moo": "^0.5.1", "nearley": "^2.19.5" } }, "sha512-pe8C6Zh5MsS+o38WlSu18NhrTjAv1UNMeDTs2/Km2ZReZdYBYtwtbWGZKK2BM2izv5CrQpbmP0oI10wvHOwv4A=="], + + "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], + + "picomatch": ["picomatch@4.0.2", "", {}, "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg=="], + + "pino": ["pino@9.7.0", "", { "dependencies": { "atomic-sleep": "^1.0.0", "fast-redact": "^3.1.1", "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pino-std-serializers": "^7.0.0", "process-warning": "^5.0.0", "quick-format-unescaped": "^4.0.3", "real-require": "^0.2.0", "safe-stable-stringify": "^2.3.1", "sonic-boom": "^4.0.1", "thread-stream": "^3.0.0" }, "bin": { "pino": "bin.js" } }, "sha512-vnMCM6xZTb1WDmLvtG2lE/2p+t9hDEIvTWJsu6FejkE62vB7gDhvzrpFR4Cw2to+9JNQxVnkAKVPA1KPB98vWg=="], + + "pino-abstract-transport": ["pino-abstract-transport@2.0.0", "", { "dependencies": { "split2": "^4.0.0" } }, "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw=="], + + "pino-loki": ["pino-loki@2.6.0", "", { "dependencies": { "pino-abstract-transport": "^2.0.0", "pump": "^3.0.2" }, "bin": { "pino-loki": "dist/cli.js" } }, "sha512-Qy+NeIdb0YmZe/M5mgnO5aGaAyVaeqgwn45T6VajhRXZlZVfGe1YNYhFa9UZyCeNFAPGaUkD2e9yPGjx+2BBYA=="], + + "pino-pretty": ["pino-pretty@13.0.0", 
"", { "dependencies": { "colorette": "^2.0.7", "dateformat": "^4.6.3", "fast-copy": "^3.0.2", "fast-safe-stringify": "^2.1.1", "help-me": "^5.0.0", "joycon": "^3.1.1", "minimist": "^1.2.6", "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pump": "^3.0.0", "secure-json-parse": "^2.4.0", "sonic-boom": "^4.0.1", "strip-json-comments": "^3.1.1" }, "bin": { "pino-pretty": "bin.js" } }, "sha512-cQBBIVG3YajgoUjo1FdKVRX6t9XPxwB9lcNJVD5GCnNM4Y6T12YYx8c6zEejxQsU0wrg9TwmDulcE9LR7qcJqA=="], + + "pino-std-serializers": ["pino-std-serializers@7.0.0", "", {}, "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA=="], + + "piscina": ["piscina@5.0.0", "", { "optionalDependencies": { "@napi-rs/nice": "^1.0.1" } }, "sha512-R+arufwL7sZvGjAhSMK3TfH55YdGOqhpKXkcwQJr432AAnJX/xxX19PA4QisrmJ+BTTfZVggaz6HexbkQq1l1Q=="], + + "pkg-dir": ["pkg-dir@4.2.0", "", { "dependencies": { "find-up": "^4.0.0" } }, "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ=="], + + "postcss": ["postcss@8.5.4", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-QSa9EBe+uwlGTFmHsPKokv3B/oEMQZxfqW0QqNCyhpa6mB1afzulwn8hihglqAb2pOw+BJgNlmXQ8la2VeHB7w=="], + + "postcss-media-query-parser": ["postcss-media-query-parser@0.2.3", "", {}, "sha512-3sOlxmbKcSHMjlUXQZKQ06jOswE7oVkXPxmZdoB1r5l0q6gTFTQSHxNxOrCccElbW7dxNytifNEo8qidX2Vsig=="], + + "postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ=="], + + "postgres-array": ["postgres-array@2.0.0", "", {}, "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA=="], + + "postgres-bytea": ["postgres-bytea@1.0.0", "", {}, "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w=="], + + "postgres-date": ["postgres-date@1.0.7", "", {}, 
"sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q=="], + + "postgres-interval": ["postgres-interval@1.2.0", "", { "dependencies": { "xtend": "^4.0.0" } }, "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ=="], + + "prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="], + + "proc-log": ["proc-log@5.0.0", "", {}, "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ=="], + + "process": ["process@0.11.10", "", {}, "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A=="], + + "process-nextick-args": ["process-nextick-args@2.0.1", "", {}, "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="], + + "process-warning": ["process-warning@5.0.0", "", {}, "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA=="], + + "promise-retry": ["promise-retry@2.0.1", "", { "dependencies": { "err-code": "^2.0.2", "retry": "^0.12.0" } }, "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g=="], + + "proper-lockfile": ["proper-lockfile@4.1.2", "", { "dependencies": { "graceful-fs": "^4.2.4", "retry": "^0.12.0", "signal-exit": "^3.0.2" } }, "sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA=="], + + "properties-reader": ["properties-reader@2.3.0", "", { "dependencies": { "mkdirp": "^1.0.4" } }, "sha512-z597WicA7nDZxK12kZqHr2TcvwNU1GCfA5UwfDY/HDp3hXPoPlb5rlEx9bwGTiJnc0OqbBTkU975jDToth8Gxw=="], + + "property-expr": ["property-expr@2.0.6", "", {}, "sha512-SVtmxhRE/CGkn3eZY1T6pC8Nln6Fr/lu1mKSgRud0eC73whjGfoAogbn78LkD8aFL0zz3bAFerKSnOl7NlErBA=="], + + "protobufjs": ["protobufjs@7.5.3", "", { "dependencies": { "@protobufjs/aspromise": "^1.1.2", "@protobufjs/base64": 
"^1.1.2", "@protobufjs/codegen": "^2.0.4", "@protobufjs/eventemitter": "^1.1.0", "@protobufjs/fetch": "^1.1.0", "@protobufjs/float": "^1.0.2", "@protobufjs/inquire": "^1.1.0", "@protobufjs/path": "^1.1.2", "@protobufjs/pool": "^1.1.0", "@protobufjs/utf8": "^1.1.0", "@types/node": ">=13.7.0", "long": "^5.0.0" } }, "sha512-sildjKwVqOI2kmFDiXQ6aEB0fjYTafpEvIBs8tOR8qI4spuL9OPROLVu2qZqi/xgCfsHIwVqlaF8JBjWFHnKbw=="], + + "proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="], + + "pump": ["pump@3.0.2", "", { "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw=="], + + "punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="], + + "qjobs": ["qjobs@1.2.0", "", {}, "sha512-8YOJEHtxpySA3fFDyCRxA+UUV+fA+rTWnuWvylOK/NCjhY+b4ocCtmu8TtsWb+mYeU+GCHf/S66KZF/AsteKHg=="], + + "qs": ["qs@6.13.0", "", { "dependencies": { "side-channel": "^1.0.6" } }, "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg=="], + + "queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="], + + "quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="], + + "quick-lru": ["quick-lru@5.1.1", "", {}, "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA=="], + + "railroad-diagrams": ["railroad-diagrams@1.0.0", "", {}, "sha512-cz93DjNeLY0idrCNOH6PviZGRN9GJhsdm9hpn1YCS879fj4W+x5IFJhhkRZcwVgMmFF7R82UA/7Oh+R8lLZg6A=="], + + "randexp": ["randexp@0.4.6", "", { "dependencies": { "discontinuous-range": "1.0.0", "ret": "~0.1.10" } }, 
"sha512-80WNmd9DA0tmZrw9qQa62GPPWfuXJknrmVmLcxvq4uZBdYqb1wYoKTmnlGUchvVWe0XiLupYkBoXVOxz3C8DYQ=="], + + "range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="], + + "raw-body": ["raw-body@2.5.2", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA=="], + + "readable-stream": ["readable-stream@4.7.0", "", { "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", "events": "^3.3.0", "process": "^0.11.10", "string_decoder": "^1.3.0" } }, "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg=="], + + "readdir-glob": ["readdir-glob@1.1.3", "", { "dependencies": { "minimatch": "^5.1.0" } }, "sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA=="], + + "readdirp": ["readdirp@4.1.2", "", {}, "sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="], + + "real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="], + + "redis-errors": ["redis-errors@1.2.0", "", {}, "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w=="], + + "redis-parser": ["redis-parser@3.0.0", "", { "dependencies": { "redis-errors": "^1.0.0" } }, "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A=="], + + "reflect-metadata": ["reflect-metadata@0.2.2", "", {}, "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q=="], + + "require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="], + + "require-from-string": 
["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="], + + "requires-port": ["requires-port@1.0.0", "", {}, "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ=="], + + "resolve": ["resolve@1.22.10", "", { "dependencies": { "is-core-module": "^2.16.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, "bin": { "resolve": "bin/resolve" } }, "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w=="], + + "resolve-alpn": ["resolve-alpn@1.2.1", "", {}, "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g=="], + + "resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="], + + "responselike": ["responselike@3.0.0", "", { "dependencies": { "lowercase-keys": "^3.0.0" } }, "sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg=="], + + "restore-cursor": ["restore-cursor@5.1.0", "", { "dependencies": { "onetime": "^7.0.0", "signal-exit": "^4.1.0" } }, "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA=="], + + "ret": ["ret@0.1.15", "", {}, "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg=="], + + "retry": ["retry@0.12.0", "", {}, "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow=="], + + "reusify": ["reusify@1.1.0", "", {}, "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="], + + "rfdc": ["rfdc@1.4.1", "", {}, "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA=="], + + "rimraf": ["rimraf@3.0.2", "", { "dependencies": { "glob": "^7.1.3" }, "bin": { "rimraf": "bin.js" } }, 
"sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA=="], + + "rollup": ["rollup@4.40.2", "", { "dependencies": { "@types/estree": "1.0.7" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.40.2", "@rollup/rollup-android-arm64": "4.40.2", "@rollup/rollup-darwin-arm64": "4.40.2", "@rollup/rollup-darwin-x64": "4.40.2", "@rollup/rollup-freebsd-arm64": "4.40.2", "@rollup/rollup-freebsd-x64": "4.40.2", "@rollup/rollup-linux-arm-gnueabihf": "4.40.2", "@rollup/rollup-linux-arm-musleabihf": "4.40.2", "@rollup/rollup-linux-arm64-gnu": "4.40.2", "@rollup/rollup-linux-arm64-musl": "4.40.2", "@rollup/rollup-linux-loongarch64-gnu": "4.40.2", "@rollup/rollup-linux-powerpc64le-gnu": "4.40.2", "@rollup/rollup-linux-riscv64-gnu": "4.40.2", "@rollup/rollup-linux-riscv64-musl": "4.40.2", "@rollup/rollup-linux-s390x-gnu": "4.40.2", "@rollup/rollup-linux-x64-gnu": "4.40.2", "@rollup/rollup-linux-x64-musl": "4.40.2", "@rollup/rollup-win32-arm64-msvc": "4.40.2", "@rollup/rollup-win32-ia32-msvc": "4.40.2", "@rollup/rollup-win32-x64-msvc": "4.40.2", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-tfUOg6DTP4rhQ3VjOO6B4wyrJnGOX85requAXvqYTHsOgb2TFJdZ3aWpT8W2kPoypSGP7dZUyzxJ9ee4buM5Fg=="], + + "run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="], + + "rxjs": ["rxjs@7.8.2", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA=="], + + "safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="], + + "safe-regex-test": ["safe-regex-test@1.1.0", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "is-regex": "^1.2.1" } }, 
"sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw=="], + + "safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="], + + "safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="], + + "sass": ["sass@1.88.0", "", { "dependencies": { "chokidar": "^4.0.0", "immutable": "^5.0.2", "source-map-js": ">=0.6.2 <2.0.0" }, "optionalDependencies": { "@parcel/watcher": "^2.4.1" }, "bin": { "sass": "sass.js" } }, "sha512-sF6TWQqjFvr4JILXzG4ucGOLELkESHL+I5QJhh7CNaE+Yge0SI+ehCatsXhJ7ymU1hAFcIS3/PBpjdIbXoyVbg=="], + + "secure-json-parse": ["secure-json-parse@2.7.0", "", {}, "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw=="], + + "semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="], + + "set-function-length": ["set-function-length@1.2.2", "", { "dependencies": { "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", "has-property-descriptors": "^1.0.2" } }, "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg=="], + + "setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="], + + "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="], + + "shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="], + + "side-channel": ["side-channel@1.1.0", "", { 
"dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="], + + "side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="], + + "side-channel-map": ["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="], + + "side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="], + + "signal-exit": ["signal-exit@3.0.7", "", {}, "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="], + + "sigstore": ["sigstore@3.1.0", "", { "dependencies": { "@sigstore/bundle": "^3.1.0", "@sigstore/core": "^2.0.0", "@sigstore/protobuf-specs": "^0.4.0", "@sigstore/sign": "^3.1.0", "@sigstore/tuf": "^3.1.0", "@sigstore/verify": "^2.1.0" } }, "sha512-ZpzWAFHIFqyFE56dXqgX/DkDRZdz+rRcjoIk/RQU4IX0wiCv1l8S7ZrXDHcCc+uaf+6o7w3h2l3g6GYG5TKN9Q=="], + + "slash": ["slash@3.0.0", "", {}, "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q=="], + + "slice-ansi": ["slice-ansi@5.0.0", "", { "dependencies": { "ansi-styles": "^6.0.0", "is-fullwidth-code-point": "^4.0.0" } }, "sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ=="], + + "smart-buffer": 
["smart-buffer@4.2.0", "", {}, "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg=="], + + "socket.io": ["socket.io@4.8.1", "", { "dependencies": { "accepts": "~1.3.4", "base64id": "~2.0.0", "cors": "~2.8.5", "debug": "~4.3.2", "engine.io": "~6.6.0", "socket.io-adapter": "~2.5.2", "socket.io-parser": "~4.2.4" } }, "sha512-oZ7iUCxph8WYRHHcjBEc9unw3adt5CmSNlppj/5Q4k2RIrhl8Z5yY2Xr4j9zj0+wzVZ0bxmYoGSzKJnRl6A4yg=="], + + "socket.io-adapter": ["socket.io-adapter@2.5.5", "", { "dependencies": { "debug": "~4.3.4", "ws": "~8.17.1" } }, "sha512-eLDQas5dzPgOWCk9GuuJC2lBqItuhKI4uxGgo9aIV7MYbk2h9Q6uULEh8WBzThoI7l+qU9Ast9fVUmkqPP9wYg=="], + + "socket.io-parser": ["socket.io-parser@4.2.4", "", { "dependencies": { "@socket.io/component-emitter": "~3.1.0", "debug": "~4.3.1" } }, "sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew=="], + + "socks": ["socks@2.8.4", "", { "dependencies": { "ip-address": "^9.0.5", "smart-buffer": "^4.2.0" } }, "sha512-D3YaD0aRxR3mEcqnidIs7ReYJFVzWdd6fXJYUM8ixcQcJRGTka/b3saV0KflYhyVJXKhb947GndU35SxYNResQ=="], + + "socks-proxy-agent": ["socks-proxy-agent@8.0.5", "", { "dependencies": { "agent-base": "^7.1.2", "debug": "^4.3.4", "socks": "^2.8.3" } }, "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw=="], + + "sonic-boom": ["sonic-boom@4.2.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww=="], + + "source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="], + + "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="], + + "source-map-support": ["source-map-support@0.5.21", "", { "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, 
"sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w=="], + + "sparse-bitfield": ["sparse-bitfield@3.0.3", "", { "dependencies": { "memory-pager": "^1.0.2" } }, "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ=="], + + "spdx-correct": ["spdx-correct@3.2.0", "", { "dependencies": { "spdx-expression-parse": "^3.0.0", "spdx-license-ids": "^3.0.0" } }, "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA=="], + + "spdx-exceptions": ["spdx-exceptions@2.5.0", "", {}, "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w=="], + + "spdx-expression-parse": ["spdx-expression-parse@3.0.1", "", { "dependencies": { "spdx-exceptions": "^2.1.0", "spdx-license-ids": "^3.0.0" } }, "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q=="], + + "spdx-license-ids": ["spdx-license-ids@3.0.21", "", {}, "sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg=="], + + "split-ca": ["split-ca@1.0.1", "", {}, "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ=="], + + "split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="], + + "sprintf-js": ["sprintf-js@1.1.3", "", {}, "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA=="], + + "ssh-remote-port-forward": ["ssh-remote-port-forward@1.0.4", "", { "dependencies": { "@types/ssh2": "^0.5.48", "ssh2": "^1.4.0" } }, "sha512-x0LV1eVDwjf1gmG7TTnfqIzf+3VPRz7vrNIjX6oYLbeCrf/PeVY6hkT68Mg+q02qXxQhrLjB0jfgvhevoCRmLQ=="], + + "ssh2": ["ssh2@1.16.0", "", { "dependencies": { "asn1": "^0.2.6", "bcrypt-pbkdf": "^1.0.2" }, "optionalDependencies": { "cpu-features": "~0.0.10", "nan": "^2.20.0" } }, 
"sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg=="], + + "ssri": ["ssri@12.0.0", "", { "dependencies": { "minipass": "^7.0.3" } }, "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ=="], + + "standard-as-callback": ["standard-as-callback@2.1.0", "", {}, "sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A=="], + + "statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="], + + "stdin-discarder": ["stdin-discarder@0.2.2", "", {}, "sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ=="], + + "streamroller": ["streamroller@3.1.5", "", { "dependencies": { "date-format": "^4.0.14", "debug": "^4.3.4", "fs-extra": "^8.1.0" } }, "sha512-KFxaM7XT+irxvdqSP1LGLgNWbYN7ay5owZ3r/8t77p+EtSUAfUgtl7be3xtqtOmGUl9K9YPO2ca8133RlTjvKw=="], + + "streamx": ["streamx@2.22.1", "", { "dependencies": { "fast-fifo": "^1.3.2", "text-decoder": "^1.1.0" }, "optionalDependencies": { "bare-events": "^2.2.0" } }, "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA=="], + + "string-width": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], + + "string-width-cjs": ["string-width@4.2.3", "", { "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="], + + "string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="], + + "strip-ansi": 
["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], + + "strip-ansi-cjs": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="], + + "strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="], + + "superagent": ["superagent@8.1.2", "", { "dependencies": { "component-emitter": "^1.3.0", "cookiejar": "^2.1.4", "debug": "^4.3.4", "fast-safe-stringify": "^2.1.1", "form-data": "^4.0.0", "formidable": "^2.1.2", "methods": "^1.1.2", "mime": "2.6.0", "qs": "^6.11.0", "semver": "^7.3.8" } }, "sha512-6WTxW1EB6yCxV5VFOIPQruWGHqc3yI7hEmZK6h+pyk69Lk/Ut7rLUY6W/ONF2MjBuGjvmMiIpsrVJ2vjrHlslA=="], + + "supertest": ["supertest@6.3.4", "", { "dependencies": { "methods": "^1.1.2", "superagent": "^8.1.2" } }, "sha512-erY3HFDG0dPnhw4U+udPfrzXa4xhSG+n4rxfRuZWCUvjFWwKl+OxWf/7zk50s84/fAAs7vf5QAb9uRa0cCykxw=="], + + "supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="], + + "supports-preserve-symlinks-flag": ["supports-preserve-symlinks-flag@1.0.0", "", {}, "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w=="], + + "tailwindcss": ["tailwindcss@4.1.8", "", {}, "sha512-kjeW8gjdxasbmFKpVGrGd5T4i40mV5J2Rasw48QARfYeQ8YS9x02ON9SFWax3Qf616rt4Cp3nVNIj6Hd1mP3og=="], + + "tapable": ["tapable@2.2.2", "", {}, "sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg=="], + + "tar": ["tar@6.2.1", "", { "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, 
"sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A=="], + + "tar-fs": ["tar-fs@3.0.9", "", { "dependencies": { "pump": "^3.0.0", "tar-stream": "^3.1.5" }, "optionalDependencies": { "bare-fs": "^4.0.1", "bare-path": "^3.0.0" } }, "sha512-XF4w9Xp+ZQgifKakjZYmFdkLoSWd34VGKcsTCwlNWM7QG3ZbaxnTsaBwnjFZqHRf/rROxaR8rXnbtwdvaDI+lA=="], + + "tar-stream": ["tar-stream@3.1.7", "", { "dependencies": { "b4a": "^1.6.4", "fast-fifo": "^1.2.0", "streamx": "^2.15.0" } }, "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ=="], + + "testcontainers": ["testcontainers@10.28.0", "", { "dependencies": { "@balena/dockerignore": "^1.0.2", "@types/dockerode": "^3.3.35", "archiver": "^7.0.1", "async-lock": "^1.4.1", "byline": "^5.0.0", "debug": "^4.3.5", "docker-compose": "^0.24.8", "dockerode": "^4.0.5", "get-port": "^7.1.0", "proper-lockfile": "^4.1.2", "properties-reader": "^2.3.0", "ssh-remote-port-forward": "^1.0.4", "tar-fs": "^3.0.7", "tmp": "^0.2.3", "undici": "^5.29.0" } }, "sha512-1fKrRRCsgAQNkarjHCMKzBKXSJFmzNTiTbhb5E/j5hflRXChEtHvkefjaHlgkNUjfw92/Dq8LTgwQn6RDBFbMg=="], + + "text-decoder": ["text-decoder@1.2.3", "", { "dependencies": { "b4a": "^1.6.4" } }, "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA=="], + + "text-table": ["text-table@0.2.0", "", {}, "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw=="], + + "thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="], + + "tiny-case": ["tiny-case@1.0.3", "", {}, "sha512-Eet/eeMhkO6TX8mnUteS9zgPbUMQa4I6Kkp5ORiBD5476/m+PIRiumP5tmh5ioJpH7k51Kehawy2UDfsnxxY8Q=="], + + "tinyglobby": ["tinyglobby@0.2.13", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, 
"sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw=="], + + "tmp": ["tmp@0.2.3", "", {}, "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w=="], + + "to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="], + + "toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="], + + "toposort": ["toposort@2.0.2", "", {}, "sha512-0a5EOkAUp8D4moMi2W8ZF8jcga7BgZd91O/yabJCFY8az+XSzeGyTKs0Aoo897iV1Nj6guFq8orWDS96z91oGg=="], + + "tr46": ["tr46@5.1.1", "", { "dependencies": { "punycode": "^2.3.1" } }, "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw=="], + + "trading-dashboard": ["trading-dashboard@workspace:apps/dashboard"], + + "ts-api-utils": ["ts-api-utils@1.4.3", "", { "peerDependencies": { "typescript": ">=4.2.0" } }, "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw=="], + + "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "tuf-js": ["tuf-js@3.0.1", "", { "dependencies": { "@tufjs/models": "3.0.1", "debug": "^4.3.6", "make-fetch-happen": "^14.0.1" } }, "sha512-+68OP1ZzSF84rTckf3FA95vJ1Zlx/uaXyiiKyPd1pA4rZNkpEvDAKmsu1xUSmbF/chCRYgZ6UZkDwC7PmzmAyA=="], + + "turbo": ["turbo@2.5.4", "", { "optionalDependencies": { "turbo-darwin-64": "2.5.4", "turbo-darwin-arm64": "2.5.4", "turbo-linux-64": "2.5.4", "turbo-linux-arm64": "2.5.4", "turbo-windows-64": "2.5.4", "turbo-windows-arm64": "2.5.4" }, "bin": { "turbo": "bin/turbo" } }, "sha512-kc8ZibdRcuWUG1pbYSBFWqmIjynlD8Lp7IB6U3vIzvOv9VG+6Sp8bzyeBWE3Oi8XV5KsQrznyRTBPvrf99E4mA=="], + + "turbo-darwin-64": ["turbo-darwin-64@2.5.4", "", { "os": "darwin", 
"cpu": "x64" }, "sha512-ah6YnH2dErojhFooxEzmvsoZQTMImaruZhFPfMKPBq8sb+hALRdvBNLqfc8NWlZq576FkfRZ/MSi4SHvVFT9PQ=="], + + "turbo-darwin-arm64": ["turbo-darwin-arm64@2.5.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-2+Nx6LAyuXw2MdXb7pxqle3MYignLvS7OwtsP9SgtSBaMlnNlxl9BovzqdYAgkUW3AsYiQMJ/wBRb7d+xemM5A=="], + + "turbo-linux-64": ["turbo-linux-64@2.5.4", "", { "os": "linux", "cpu": "x64" }, "sha512-5May2kjWbc8w4XxswGAl74GZ5eM4Gr6IiroqdLhXeXyfvWEdm2mFYCSWOzz0/z5cAgqyGidF1jt1qzUR8hTmOA=="], + + "turbo-linux-arm64": ["turbo-linux-arm64@2.5.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-/2yqFaS3TbfxV3P5yG2JUI79P7OUQKOUvAnx4MV9Bdz6jqHsHwc9WZPpO4QseQm+NvmgY6ICORnoVPODxGUiJg=="], + + "turbo-windows-64": ["turbo-windows-64@2.5.4", "", { "os": "win32", "cpu": "x64" }, "sha512-EQUO4SmaCDhO6zYohxIjJpOKRN3wlfU7jMAj3CgcyTPvQR/UFLEKAYHqJOnJtymbQmiiM/ihX6c6W6Uq0yC7mA=="], + + "turbo-windows-arm64": ["turbo-windows-arm64@2.5.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-oQ8RrK1VS8lrxkLriotFq+PiF7iiGgkZtfLKF4DDKsmdbPo0O9R2mQxm7jHLuXraRCuIQDWMIw6dpcr7Iykf4A=="], + + "tweetnacl": ["tweetnacl@0.14.5", "", {}, "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="], + + "type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="], + + "type-fest": ["type-fest@2.19.0", "", {}, "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA=="], + + "type-is": ["type-is@1.6.18", "", { "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" } }, "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g=="], + + "typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="], + + "ua-parser-js": 
["ua-parser-js@0.7.40", "", { "bin": { "ua-parser-js": "script/cli.js" } }, "sha512-us1E3K+3jJppDBa3Tl0L3MOJiGhe1C6P0+nIvQAFYbxlMAx0h81eOwLmU57xgqToduDDPx3y5QsdjPfDu+FgOQ=="], + + "undici": ["undici@5.29.0", "", { "dependencies": { "@fastify/busboy": "^2.0.0" } }, "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg=="], + + "undici-types": ["undici-types@6.21.0", "", {}, "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="], + + "unique-filename": ["unique-filename@4.0.0", "", { "dependencies": { "unique-slug": "^5.0.0" } }, "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ=="], + + "unique-slug": ["unique-slug@5.0.0", "", { "dependencies": { "imurmurhash": "^0.1.4" } }, "sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg=="], + + "universalify": ["universalify@0.1.2", "", {}, "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="], + + "unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="], + + "update-browserslist-db": ["update-browserslist-db@1.1.3", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw=="], + + "uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="], + + "util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="], + + "utils-merge": ["utils-merge@1.0.1", "", {}, 
"sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="], + + "uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="], + + "validate-npm-package-license": ["validate-npm-package-license@3.0.4", "", { "dependencies": { "spdx-correct": "^3.0.0", "spdx-expression-parse": "^3.0.0" } }, "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew=="], + + "validate-npm-package-name": ["validate-npm-package-name@6.0.1", "", {}, "sha512-OaI//3H0J7ZkR1OqlhGA8cA+Cbk/2xFOQpJOt5+s27/ta9eZwpeervh4Mxh4w0im/kdgktowaqVNR7QOrUd7Yg=="], + + "vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="], + + "vite": ["vite@6.3.5", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ=="], + + "void-elements": ["void-elements@2.0.1", "", {}, "sha512-qZKX4RnBzH2ugr8Lxa7x+0V6XD9Sb/ouARtiasEQCHB1EVU4NXtmHsDDrx1dO4ne5fc3J6EW05BP1Dl0z0iung=="], + + "watchpack": ["watchpack@2.4.2", "", { "dependencies": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" } }, "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw=="], + + 
"weak-lru-cache": ["weak-lru-cache@1.2.2", "", {}, "sha512-DEAoo25RfSYMuTGc9vPJzZcZullwIqRDSI9LOy+fkCJPi6hykCnfKaXTuPBDuXAUcqHXyOgFtHNp/kB2FjYHbw=="], + + "webidl-conversions": ["webidl-conversions@7.0.0", "", {}, "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g=="], + + "whatwg-url": ["whatwg-url@14.2.0", "", { "dependencies": { "tr46": "^5.1.0", "webidl-conversions": "^7.0.0" } }, "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw=="], + + "which": ["which@1.3.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "which": "./bin/which" } }, "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ=="], + + "word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="], + + "wrap-ansi": ["wrap-ansi@9.0.0", "", { "dependencies": { "ansi-styles": "^6.2.1", "string-width": "^7.0.0", "strip-ansi": "^7.1.0" } }, "sha512-G8ura3S+3Z2G+mkgNRq8dqaFZAuxfsxpBB8OCTGRTCtp+l/v9nbFNmCUP1BZMts3G1142MsZfn6eeUKrr4PD1Q=="], + + "wrap-ansi-cjs": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], + + "wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="], + + "ws": ["ws@8.18.2", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ=="], + + "xtend": ["xtend@4.0.2", "", {}, "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="], + + "y18n": ["y18n@5.0.8", "", {}, 
"sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="], + + "yallist": ["yallist@4.0.0", "", {}, "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="], + + "yaml": ["yaml@2.8.0", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ=="], + + "yargs": ["yargs@17.7.2", "", { "dependencies": { "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", "yargs-parser": "^21.1.1" } }, "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="], + + "yargs-parser": ["yargs-parser@21.1.1", "", {}, "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="], + + "yauzl": ["yauzl@3.2.0", "", { "dependencies": { "buffer-crc32": "~0.2.3", "pend": "~1.2.0" } }, "sha512-Ow9nuGZE+qp1u4JIPvg+uCiUr7xGQWdff7JQSk5VGYTAZMDe2q8lxJ10ygv10qmSj031Ty/6FNJpLO4o1Sgc+w=="], + + "yocto-queue": ["yocto-queue@1.2.1", "", {}, "sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg=="], + + "yoctocolors-cjs": ["yoctocolors-cjs@2.1.2", "", {}, "sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA=="], + + "yup": ["yup@1.6.1", "", { "dependencies": { "property-expr": "^2.0.5", "tiny-case": "^1.0.3", "toposort": "^2.0.2", "type-fest": "^2.19.0" } }, "sha512-JED8pB50qbA4FOkDol0bYF/p60qSEDQqBD0/qeIrUCG1KbPBIQ776fCUNb9ldbPcSTxA69g/47XTo4TqWiuXOA=="], + + "zip-stream": ["zip-stream@6.0.1", "", { "dependencies": { "archiver-utils": "^5.0.0", "compress-commons": "^6.0.2", "readable-stream": "^4.0.0" } }, "sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA=="], + + "zone.js": ["zone.js@0.15.1", "", {}, 
"sha512-XE96n56IQpJM7NAoXswY3XRLcWFW83xe0BiAOeMD7K5k5xecOeul3Qcpx6GqEeeHNkW5DWL5zOyTbEfB4eti8w=="], + + "@angular-devkit/core/ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="], + + "@angular-devkit/core/source-map": ["source-map@0.7.4", "", {}, "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA=="], + + "@angular/compiler-cli/@babel/core": ["@babel/core@7.27.4", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.3", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.4", "@babel/parser": "^7.27.4", "@babel/template": "^7.27.2", "@babel/traverse": "^7.27.4", "@babel/types": "^7.27.3", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-bXYxrXFubeYdvB0NhD/NBB3Qi6aZeV20GOWVI47t2dkecCEoneR4NPVcb7abpXDEvejgrUfFtG6vG/zxAKmg+g=="], + + "@angular/compiler-cli/yargs": ["yargs@18.0.0", "", { "dependencies": { "cliui": "^9.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "string-width": "^7.2.0", "y18n": "^5.0.5", "yargs-parser": "^22.0.0" } }, "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg=="], + + "@babel/core/convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="], + + "@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], + + "@babel/helper-compilation-targets/lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": 
"^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="], + + "@babel/helper-compilation-targets/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], + + "@babel/traverse/globals": ["globals@11.12.0", "", {}, "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA=="], + + "@inquirer/core/signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="], + + "@inquirer/core/wrap-ansi": ["wrap-ansi@6.2.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA=="], + + "@isaacs/cliui/string-width": ["string-width@5.1.2", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^9.2.2", "strip-ansi": "^7.0.1" } }, "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="], + + "@isaacs/cliui/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], + + "@isaacs/cliui/wrap-ansi": ["wrap-ansi@8.1.0", "", { "dependencies": { "ansi-styles": "^6.1.0", "string-width": "^5.0.1", "strip-ansi": "^7.0.1" } }, "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="], + + "@listr2/prompt-adapter-inquirer/@inquirer/type": ["@inquirer/type@1.5.5", "", { "dependencies": { "mute-stream": "^1.0.0" } }, "sha512-MzICLu4yS7V8AA61sANROZ9vT1H3ooca5dSmI1FjZkzq7o/koMsRfQSzRtFo+F3Ao4Sf1C0bpLKejpKB/+j6MA=="], + + "@npmcli/agent/lru-cache": ["lru-cache@10.4.3", "", {}, 
"sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], + + "@npmcli/git/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], + + "@npmcli/git/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], + + "@npmcli/package-json/glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="], + + "@npmcli/promise-spawn/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], + + "@npmcli/run-script/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], + + "@parcel/watcher/detect-libc": ["detect-libc@1.0.3", "", { "bin": { "detect-libc": "./bin/detect-libc.js" } }, "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg=="], + + "@parcel/watcher/node-addon-api": ["node-addon-api@7.1.1", "", {}, "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ=="], + + "@stock-bot/cache/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/config/@types/node": ["@types/node@20.19.0", 
"", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/data-frame/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/event-bus/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/http/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/logger/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/mongodb-client/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/postgres-client/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/questdb-client/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/shutdown/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/strategy-engine/@types/node": ["@types/node@20.19.0", "", { 
"dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/strategy-service/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/strategy-service/commander": ["commander@11.1.0", "", {}, "sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ=="], + + "@stock-bot/types/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/utils/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@stock-bot/vector-engine/@types/node": ["@types/node@20.19.0", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-hfrc+1tud1xcdVTABC2JiomZJEklMcXYNTVtZLAeqTVWD+qL5jkHKT+1lOtqDdGxt+mB53DTtiz673vfjU8D1Q=="], + + "@tailwindcss/oxide/tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="], + + "@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.4.3", "", { "dependencies": { "@emnapi/wasi-threads": "1.0.2", "tslib": "^2.4.0" }, "bundled": true }, "sha512-4m62DuCE07lw01soJwPiBGC0nAww0Q+RY70VZ+n49yDIO13yyinhbWCeNnaob0lakDtWQzSdtNWzJeOJt2ma+g=="], + + "@tailwindcss/oxide-wasm32-wasi/@emnapi/runtime": ["@emnapi/runtime@1.4.3", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, 
"sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ=="], + + "@tailwindcss/oxide-wasm32-wasi/@emnapi/wasi-threads": ["@emnapi/wasi-threads@1.0.2", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-5n3nTJblwRi8LlXkJ9eBzu+kZR8Yxcc7ubakyQTFzPMtIhFpUBRbsnc2Dv88IZDIbCDlBiWrknhB4Lsz7mg6BA=="], + + "@tailwindcss/oxide-wasm32-wasi/@napi-rs/wasm-runtime": ["@napi-rs/wasm-runtime@0.2.11", "", { "dependencies": { "@emnapi/core": "^1.4.3", "@emnapi/runtime": "^1.4.3", "@tybys/wasm-util": "^0.9.0" }, "bundled": true }, "sha512-9DPkXtvHydrcOsopiYpUgPHpmj0HWZKMUnL2dZqpvC42lsratuBG06V5ipyno0fUek5VlFsNQ+AcFATSrJXgMA=="], + + "@tailwindcss/oxide-wasm32-wasi/@tybys/wasm-util": ["@tybys/wasm-util@0.9.0", "", { "dependencies": { "tslib": "^2.4.0" }, "bundled": true }, "sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw=="], + + "@tailwindcss/oxide-wasm32-wasi/tslib": ["tslib@2.8.1", "", { "bundled": true }, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "@tufjs/models/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], + + "@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.3", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg=="], + + "ajv-formats/ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="], + + "ansi-escapes/type-fest": ["type-fest@0.21.3", "", {}, "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w=="], + + 
"anymatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], + + "archiver-utils/glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="], + + "archiver-utils/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="], + + "bl/buffer": ["buffer@5.7.1", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ=="], + + "bl/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], + + "body-parser/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], + + "cacache/glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="], + + "cacache/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], + + "cacache/tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", 
"mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="], + + "cli-truncate/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], + + "cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], + + "compress-commons/is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="], + + "connect/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], + + "cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="], + + "decompress-response/mimic-response": ["mimic-response@3.1.0", "", {}, "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="], + + "docker-modem/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], + + "dockerode/tar-fs": ["tar-fs@2.1.3", "", { "dependencies": { "chownr": "^1.1.1", "mkdirp-classic": "^0.5.2", "pump": "^3.0.0", "tar-stream": "^2.1.4" } }, "sha512-090nwYJDmlhwFwEW3QQl+vaNnxsO2yVsd45eTKRBzSzu+hlb1w2K9inVq5b0ngXuLVqQ4ApvsUHHnu/zQNkWAg=="], + + "dockerode/uuid": ["uuid@10.0.0", "", { "bin": { "uuid": 
"dist/bin/uuid" } }, "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="], + + "dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="], + + "encoding/iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw=="], + + "engine.io/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], + + "engine.io/ws": ["ws@8.17.1", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ=="], + + "ent/punycode": ["punycode@1.4.1", "", {}, "sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ=="], + + "external-editor/tmp": ["tmp@0.0.33", "", { "dependencies": { "os-tmpdir": "~1.0.2" } }, "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw=="], + + "fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], + + "finalhandler/debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="], + + "finalhandler/on-finished": ["on-finished@2.3.0", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-ikqdkGAAyf/X/gPhXGvfgAytDZtDbr+bkNUJ0N9h5MI/dmdgCs3l6hoHrcUv41sRKew3jIwrp4qQDXiK99Utww=="], + + "finalhandler/statuses": ["statuses@1.5.0", "", {}, 
"sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA=="], + + "find-cache-dir/make-dir": ["make-dir@3.1.0", "", { "dependencies": { "semver": "^6.0.0" } }, "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw=="], + + "foreground-child/signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="], + + "globals/type-fest": ["type-fest@0.20.2", "", {}, "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ=="], + + "got/type-fest": ["type-fest@4.41.0", "", {}, "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA=="], + + "hosted-git-info/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], + + "http-proxy/eventemitter3": ["eventemitter3@4.0.7", "", {}, "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="], + + "ignore-walk/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], + + "istanbul-lib-instrument/@babel/core": ["@babel/core@7.27.4", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.3", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.4", "@babel/parser": "^7.27.4", "@babel/template": "^7.27.2", "@babel/traverse": "^7.27.4", "@babel/types": "^7.27.3", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-bXYxrXFubeYdvB0NhD/NBB3Qi6aZeV20GOWVI47t2dkecCEoneR4NPVcb7abpXDEvejgrUfFtG6vG/zxAKmg+g=="], + + "karma/chokidar": ["chokidar@3.6.0", "", { 
"dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", "glob-parent": "~5.1.2", "is-binary-path": "~2.1.0", "is-glob": "~4.0.1", "normalize-path": "~3.0.0", "readdirp": "~3.6.0" }, "optionalDependencies": { "fsevents": "~2.3.2" } }, "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="], + + "karma/yargs": ["yargs@16.2.0", "", { "dependencies": { "cliui": "^7.0.2", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.0", "y18n": "^5.0.5", "yargs-parser": "^20.2.2" } }, "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw=="], + + "karma-coverage/istanbul-lib-instrument": ["istanbul-lib-instrument@5.2.1", "", { "dependencies": { "@babel/core": "^7.12.3", "@babel/parser": "^7.14.7", "@istanbuljs/schema": "^0.1.2", "istanbul-lib-coverage": "^3.2.0", "semver": "^6.3.0" } }, "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg=="], + + "karma-jasmine/jasmine-core": ["jasmine-core@4.6.1", "", {}, "sha512-VYz/BjjmC3klLJlLwA4Kw8ytk0zDSmbbDLNs794VnWmkcCB7I9aAL/D48VNQtmITyPvea2C3jdUMfc3kAoy0PQ=="], + + "lazystream/readable-stream": ["readable-stream@2.3.8", "", { "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="], + + "log-symbols/chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="], + + "log-symbols/is-unicode-supported": ["is-unicode-supported@1.3.0", "", {}, "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ=="], + + "log-update/ansi-escapes": ["ansi-escapes@7.0.0", "", { "dependencies": { "environment": "^1.0.0" } }, 
"sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw=="], + + "log-update/slice-ansi": ["slice-ansi@7.1.0", "", { "dependencies": { "ansi-styles": "^6.2.1", "is-fullwidth-code-point": "^5.0.0" } }, "sha512-bSiSngZ/jWeX93BqeIAbImyTbEihizcwNjFoRUIY/T1wWQsfsm2Vw1agPKylXvQTU7iASGdHhyqRlqQzfz+Htg=="], + + "log-update/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], + + "make-fetch-happen/negotiator": ["negotiator@1.0.0", "", {}, "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg=="], + + "micromatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], + + "minipass-flush/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], + + "minipass-pipeline/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], + + "minipass-sized/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], + + "mongodb-memory-server-core/mongodb": ["mongodb@5.9.2", "", { "dependencies": { "bson": "^5.5.0", "mongodb-connection-string-url": "^2.6.0", "socks": "^2.7.1" }, "optionalDependencies": { "@mongodb-js/saslprep": "^1.1.0" }, "peerDependencies": { "@aws-sdk/credential-providers": "^3.188.0", "@mongodb-js/zstd": "^1.0.0", "kerberos": "^1.0.0 || ^2.0.0", "mongodb-client-encryption": ">=2.3.0 <3", "snappy": "^7.2.2" }, "optionalPeers": ["@aws-sdk/credential-providers", "@mongodb-js/zstd", "kerberos", "mongodb-client-encryption", 
"snappy"] }, "sha512-H60HecKO4Bc+7dhOv4sJlgvenK4fQNqqUIlXxZYQNbfEWSALGAwGoyJd/0Qwk4TttFXUOHJ2ZJQe/52ScaUwtQ=="], + + "nearley/commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="], + + "node-gyp/tar": ["tar@7.4.3", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" } }, "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw=="], + + "node-gyp/which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="], + + "ora/chalk": ["chalk@5.4.1", "", {}, "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w=="], + + "ora/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], + + "ora/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], + + "p-locate/p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="], + + "path-scurry/lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], + + "pkg-dir/find-up": ["find-up@4.1.0", "", { "dependencies": { "locate-path": "^5.0.0", "path-exists": "^4.0.0" } }, "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw=="], + + "properties-reader/mkdirp": 
["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="], + + "readdir-glob/minimatch": ["minimatch@5.1.6", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g=="], + + "restore-cursor/signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="], + + "sass/immutable": ["immutable@5.1.2", "", {}, "sha512-qHKXW1q6liAk1Oys6umoaZbDRqjcjgSrbnrifHsfsttza7zcvRAsL7mMV6xWcyhwQy7Xj5v4hhbr6b+iDYwlmQ=="], + + "slice-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], + + "slice-ansi/is-fullwidth-code-point": ["is-fullwidth-code-point@4.0.0", "", {}, "sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ=="], + + "socket.io/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], + + "socket.io-adapter/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], + + "socket.io-adapter/ws": ["ws@8.17.1", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ=="], + + "socket.io-parser/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="], + + "tar/fs-minipass": ["fs-minipass@2.1.0", "", { "dependencies": { "minipass": "^3.0.0" } }, 
"sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg=="], + + "tar/minipass": ["minipass@5.0.0", "", {}, "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ=="], + + "tar/minizlib": ["minizlib@2.1.2", "", { "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" } }, "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg=="], + + "tar/mkdirp": ["mkdirp@1.0.4", "", { "bin": { "mkdirp": "bin/cmd.js" } }, "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw=="], + + "wrap-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], + + "wrap-ansi/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], + + "wrap-ansi/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], + + "yauzl/buffer-crc32": ["buffer-crc32@0.2.13", "", {}, "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ=="], + + "@angular-devkit/core/ajv/json-schema-traverse": ["json-schema-traverse@1.0.0", "", {}, "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="], + + "@angular/compiler-cli/@babel/core/convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="], + + "@angular/compiler-cli/@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], + + "@angular/compiler-cli/yargs/cliui": ["cliui@9.0.1", "", { "dependencies": { "string-width": "^7.2.0", "strip-ansi": "^7.1.0", "wrap-ansi": "^9.0.0" } }, "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w=="], + + "@angular/compiler-cli/yargs/string-width": ["string-width@7.2.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="], + + "@angular/compiler-cli/yargs/yargs-parser": ["yargs-parser@22.0.0", "", {}, "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw=="], + + "@babel/helper-compilation-targets/lru-cache/yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="], + + "@isaacs/cliui/string-width/emoji-regex": ["emoji-regex@9.2.2", "", {}, "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="], + + "@isaacs/cliui/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], + + "@isaacs/cliui/wrap-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], + + "@listr2/prompt-adapter-inquirer/@inquirer/type/mute-stream": ["mute-stream@1.0.0", "", {}, "sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA=="], + + "@npmcli/git/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], + + "@npmcli/package-json/glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, 
"sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], + + "@npmcli/promise-spawn/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], + + "@npmcli/run-script/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], + + "@tailwindcss/oxide/tar/chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], + + "@tailwindcss/oxide/tar/mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], + + "@tailwindcss/oxide/tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], + + "@tufjs/models/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], + + "@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], + + "ajv-formats/ajv/json-schema-traverse": ["json-schema-traverse@1.0.0", "", {}, "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="], + + "archiver-utils/glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], + + "body-parser/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], + + 
"cacache/glob/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="], + + "cacache/tar/chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], + + "cacache/tar/mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], + + "cacache/tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], + + "cli-truncate/string-width/emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], + + "cli-truncate/string-width/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], + + "connect/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], + + "dockerode/tar-fs/chownr": ["chownr@1.1.4", "", {}, "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="], + + "dockerode/tar-fs/tar-stream": ["tar-stream@2.2.0", "", { "dependencies": { "bl": "^4.0.3", "end-of-stream": "^1.4.1", "fs-constants": "^1.0.0", "inherits": "^2.0.3", "readable-stream": "^3.1.1" } }, "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ=="], + + "finalhandler/debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="], + + "find-cache-dir/make-dir/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], + + "ignore-walk/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], + + "istanbul-lib-instrument/@babel/core/convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="], + + "istanbul-lib-instrument/@babel/core/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], + + "karma-coverage/istanbul-lib-instrument/@babel/core": ["@babel/core@7.27.4", "", { "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.27.3", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.27.3", "@babel/helpers": "^7.27.4", "@babel/parser": "^7.27.4", "@babel/template": "^7.27.2", "@babel/traverse": "^7.27.4", "@babel/types": "^7.27.3", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, "sha512-bXYxrXFubeYdvB0NhD/NBB3Qi6aZeV20GOWVI47t2dkecCEoneR4NPVcb7abpXDEvejgrUfFtG6vG/zxAKmg+g=="], + + "karma-coverage/istanbul-lib-instrument/semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], + + "karma/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="], + + "karma/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, 
"sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="], + + "karma/yargs/cliui": ["cliui@7.0.4", "", { "dependencies": { "string-width": "^4.2.0", "strip-ansi": "^6.0.0", "wrap-ansi": "^7.0.0" } }, "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ=="], + + "karma/yargs/yargs-parser": ["yargs-parser@20.2.9", "", {}, "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w=="], + + "lazystream/readable-stream/isarray": ["isarray@1.0.0", "", {}, "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="], + + "lazystream/readable-stream/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="], + + "lazystream/readable-stream/string_decoder": ["string_decoder@1.1.1", "", { "dependencies": { "safe-buffer": "~5.1.0" } }, "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="], + + "log-update/slice-ansi/ansi-styles": ["ansi-styles@6.2.1", "", {}, "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug=="], + + "log-update/slice-ansi/is-fullwidth-code-point": ["is-fullwidth-code-point@5.0.0", "", { "dependencies": { "get-east-asian-width": "^1.0.0" } }, "sha512-OVa3u9kkBbw7b8Xw5F9P+D/T9X+Z4+JruYVNapTjPYZYUznQ5YfWeFkOj606XYYW8yugTfC8Pj0hYqvi4ryAhA=="], + + "log-update/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], + + "mongodb-memory-server-core/mongodb/bson": ["bson@5.5.1", "", {}, "sha512-ix0EwukN2EpC0SRWIj/7B5+A6uQMQy6KMREI9qQqvgpkV2frH63T0UDVd1SYedL6dNCmDBYB3QtXi4ISk9YT+g=="], + + "mongodb-memory-server-core/mongodb/mongodb-connection-string-url": ["mongodb-connection-string-url@2.6.0", "", { "dependencies": { "@types/whatwg-url": 
"^8.2.1", "whatwg-url": "^11.0.0" } }, "sha512-WvTZlI9ab0QYtTYnuMLgobULWhokRjtC7db9LtcVfJ+Hsnyr5eo6ZtNAt3Ly24XZScGMelOcGtm7lSn0332tPQ=="], + + "node-gyp/tar/chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="], + + "node-gyp/tar/mkdirp": ["mkdirp@3.0.1", "", { "bin": { "mkdirp": "dist/cjs/src/bin.js" } }, "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg=="], + + "node-gyp/tar/yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="], + + "node-gyp/which/isexe": ["isexe@3.1.1", "", {}, "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ=="], + + "ora/string-width/emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], + + "ora/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], + + "p-locate/p-limit/yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="], + + "pkg-dir/find-up/locate-path": ["locate-path@5.0.0", "", { "dependencies": { "p-locate": "^4.1.0" } }, "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g=="], + + "readdir-glob/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], + + "tar/fs-minipass/minipass": ["minipass@3.3.6", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], + + "tar/minizlib/minipass": ["minipass@3.3.6", "", { "dependencies": { 
"yallist": "^4.0.0" } }, "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw=="], + + "wrap-ansi/string-width/emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], + + "wrap-ansi/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], + + "@angular/compiler-cli/yargs/cliui/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], + + "@angular/compiler-cli/yargs/string-width/emoji-regex": ["emoji-regex@10.4.0", "", {}, "sha512-EC+0oUMY1Rqm4O6LLrgjtYDvcVYTy7chDnM4Q7030tP4Kwj3u/pR6gP9ygnp2CJMK5Gq+9Q2oqmrFJAz01DXjw=="], + + "@angular/compiler-cli/yargs/string-width/strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="], + + "@npmcli/package-json/glob/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], + + "archiver-utils/glob/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], + + "cacache/glob/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="], + + "cli-truncate/string-width/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, 
"sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], + + "dockerode/tar-fs/tar-stream/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="], + + "karma-coverage/istanbul-lib-instrument/@babel/core/convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="], + + "karma/chokidar/readdirp/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="], + + "karma/yargs/cliui/wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="], + + "mongodb-memory-server-core/mongodb/mongodb-connection-string-url/@types/whatwg-url": ["@types/whatwg-url@8.2.2", "", { "dependencies": { "@types/node": "*", "@types/webidl-conversions": "*" } }, "sha512-FtQu10RWgn3D9U4aazdwIE2yzphmTJREDqNdODHrbrZmmMqI0vMheC/6NE/J1Yveaj8H+ela+YwWTjq5PGmuhA=="], + + "mongodb-memory-server-core/mongodb/mongodb-connection-string-url/whatwg-url": ["whatwg-url@11.0.0", "", { "dependencies": { "tr46": "^3.0.0", "webidl-conversions": "^7.0.0" } }, "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ=="], + + "pkg-dir/find-up/locate-path/p-locate": ["p-locate@4.1.0", "", { "dependencies": { "p-limit": "^2.2.0" } }, "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A=="], + + "@angular/compiler-cli/yargs/cliui/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, 
"sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], + + "@angular/compiler-cli/yargs/string-width/strip-ansi/ansi-regex": ["ansi-regex@6.1.0", "", {}, "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA=="], + + "mongodb-memory-server-core/mongodb/mongodb-connection-string-url/whatwg-url/tr46": ["tr46@3.0.0", "", { "dependencies": { "punycode": "^2.1.1" } }, "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA=="], + + "pkg-dir/find-up/locate-path/p-locate/p-limit": ["p-limit@2.3.0", "", { "dependencies": { "p-try": "^2.0.0" } }, "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w=="], + } +} diff --git a/bunfig.toml b/bunfig.toml index f4eed30..786dd97 100644 --- a/bunfig.toml +++ b/bunfig.toml @@ -1,27 +1,27 @@ -# Root bunfig.toml for Stock Bot Trading Platform -# Configures Bun for the entire monorepo workspace -# Look for packages in workspace root - -peer = true -workspaces = true - -[install.scopes] -"@stock-bot" = { registry = "file:../../" } - -[test] -# Configure coverage and test behavior -coverage = true -timeout = "30s" - -# Configure test environment -preload = ["./test/setup.ts"] - -# Environment variables for tests -[test.env] -NODE_ENV = "test" - -# Module path resolution -[bun] -paths = { - "@stock-bot/*" = ["./libs/*/src"] -} +# Root bunfig.toml for Stock Bot Trading Platform +# Configures Bun for the entire monorepo workspace +# Look for packages in workspace root + +peer = true +workspaces = true + +[install.scopes] +"@stock-bot" = { registry = "file:../../" } + +[test] +# Configure coverage and test behavior +coverage = true +timeout = "30s" + +# Configure test environment +preload = ["./test/setup.ts"] + +# Environment variables for tests +[test.env] +NODE_ENV = "test" + +# Module path resolution +[bun] +paths = { + "@stock-bot/*" = ["./libs/*/src"] +} diff --git 
a/database/mongodb/init/01-init-collections.js b/database/mongodb/init/01-init-collections.js index d9963d2..4923594 100644 --- a/database/mongodb/init/01-init-collections.js +++ b/database/mongodb/init/01-init-collections.js @@ -1,234 +1,234 @@ -// MongoDB Initialization Script for Trading Bot -// This script creates collections and indexes for sentiment and document storage - -// Switch to the trading_documents database -db = db.getSiblingDB('trading_documents'); - -// Create collections with validation schemas - -// Sentiment Analysis Collection -db.createCollection('sentiment_analysis', { - validator: { - $jsonSchema: { - bsonType: 'object', - required: ['symbol', 'source', 'timestamp', 'sentiment_score'], - properties: { - symbol: { - bsonType: 'string', - description: 'Stock symbol (e.g., AAPL, GOOGL)' - }, - source: { - bsonType: 'string', - description: 'Data source (news, social, earnings_call, etc.)' - }, - timestamp: { - bsonType: 'date', - description: 'When the sentiment was recorded' - }, - sentiment_score: { - bsonType: 'double', - minimum: -1.0, - maximum: 1.0, - description: 'Sentiment score between -1 (negative) and 1 (positive)' - }, - confidence: { - bsonType: 'double', - minimum: 0.0, - maximum: 1.0, - description: 'Confidence level of the sentiment analysis' - }, - text_snippet: { - bsonType: 'string', - description: 'Original text that was analyzed' - }, - metadata: { - bsonType: 'object', - description: 'Additional metadata about the sentiment source' - } - } - } - } -}); - -// Raw Documents Collection (for news articles, social media posts, etc.) 
-db.createCollection('raw_documents', { - validator: { - $jsonSchema: { - bsonType: 'object', - required: ['source', 'document_type', 'timestamp', 'content'], - properties: { - source: { - bsonType: 'string', - description: 'Document source (news_api, twitter, reddit, etc.)' - }, - document_type: { - bsonType: 'string', - enum: ['news_article', 'social_post', 'earnings_transcript', 'research_report', 'press_release'], - description: 'Type of document' - }, - timestamp: { - bsonType: 'date', - description: 'When the document was created/published' - }, - symbols: { - bsonType: 'array', - items: { - bsonType: 'string' - }, - description: 'Array of stock symbols mentioned in the document' - }, - title: { - bsonType: 'string', - description: 'Document title or headline' - }, - content: { - bsonType: 'string', - description: 'Full document content' - }, - url: { - bsonType: 'string', - description: 'Original URL of the document' - }, - author: { - bsonType: 'string', - description: 'Document author or source account' - }, - processed: { - bsonType: 'bool', - description: 'Whether this document has been processed for sentiment' - }, - metadata: { - bsonType: 'object', - description: 'Additional document metadata' - } - } - } - } -}); - -// Market Events Collection (for significant market events and their impact) -db.createCollection('market_events', { - validator: { - $jsonSchema: { - bsonType: 'object', - required: ['event_type', 'timestamp', 'description'], - properties: { - event_type: { - bsonType: 'string', - enum: ['earnings', 'merger', 'acquisition', 'ipo', 'dividend', 'split', 'regulatory', 'economic_indicator'], - description: 'Type of market event' - }, - timestamp: { - bsonType: 'date', - description: 'When the event occurred or was announced' - }, - symbols: { - bsonType: 'array', - items: { - bsonType: 'string' - }, - description: 'Stock symbols affected by this event' - }, - description: { - bsonType: 'string', - description: 'Event description' - }, - 
impact_score: { - bsonType: 'double', - minimum: -5.0, - maximum: 5.0, - description: 'Expected market impact score' - }, - source_documents: { - bsonType: 'array', - items: { - bsonType: 'objectId' - }, - description: 'References to raw_documents that reported this event' - } - } - } - } -}); - -// Create indexes for efficient querying - -// Sentiment Analysis indexes -db.sentiment_analysis.createIndex({ symbol: 1, timestamp: -1 }); -db.sentiment_analysis.createIndex({ source: 1, timestamp: -1 }); -db.sentiment_analysis.createIndex({ timestamp: -1 }); -db.sentiment_analysis.createIndex({ symbol: 1, source: 1, timestamp: -1 }); - -// Raw Documents indexes -db.raw_documents.createIndex({ symbols: 1, timestamp: -1 }); -db.raw_documents.createIndex({ source: 1, timestamp: -1 }); -db.raw_documents.createIndex({ document_type: 1, timestamp: -1 }); -db.raw_documents.createIndex({ processed: 1, timestamp: -1 }); -db.raw_documents.createIndex({ timestamp: -1 }); - -// Market Events indexes -db.market_events.createIndex({ symbols: 1, timestamp: -1 }); -db.market_events.createIndex({ event_type: 1, timestamp: -1 }); -db.market_events.createIndex({ timestamp: -1 }); - -// Insert some sample data for testing - -// Sample sentiment data -db.sentiment_analysis.insertMany([ - { - symbol: 'AAPL', - source: 'news_analysis', - timestamp: new Date(), - sentiment_score: 0.75, - confidence: 0.89, - text_snippet: 'Apple reports strong quarterly earnings...', - metadata: { - article_id: 'news_001', - provider: 'financial_news_api' - } - }, - { - symbol: 'GOOGL', - source: 'social_media', - timestamp: new Date(), - sentiment_score: -0.25, - confidence: 0.67, - text_snippet: 'Concerns about Google AI regulation...', - metadata: { - platform: 'twitter', - engagement_score: 450 - } - } -]); - -// Sample raw document -db.raw_documents.insertOne({ - source: 'financial_news_api', - document_type: 'news_article', - timestamp: new Date(), - symbols: ['AAPL', 'MSFT'], - title: 'Tech Giants Show 
Strong Q4 Performance', - content: 'Apple and Microsoft both reported better than expected earnings for Q4...', - url: 'https://example.com/tech-earnings-q4', - author: 'Financial Reporter', - processed: true, - metadata: { - word_count: 850, - readability_score: 0.75 - } -}); - -// Sample market event -db.market_events.insertOne({ - event_type: 'earnings', - timestamp: new Date(), - symbols: ['AAPL'], - description: 'Apple Q4 2024 Earnings Report', - impact_score: 2.5, - source_documents: [] -}); - -print('MongoDB initialization completed successfully!'); -print('Created collections: sentiment_analysis, raw_documents, market_events'); -print('Created indexes for efficient querying'); -print('Inserted sample data for testing'); +// MongoDB Initialization Script for Trading Bot +// This script creates collections and indexes for sentiment and document storage + +// Switch to the trading_documents database +db = db.getSiblingDB('trading_documents'); + +// Create collections with validation schemas + +// Sentiment Analysis Collection +db.createCollection('sentiment_analysis', { + validator: { + $jsonSchema: { + bsonType: 'object', + required: ['symbol', 'source', 'timestamp', 'sentiment_score'], + properties: { + symbol: { + bsonType: 'string', + description: 'Stock symbol (e.g., AAPL, GOOGL)' + }, + source: { + bsonType: 'string', + description: 'Data source (news, social, earnings_call, etc.)' + }, + timestamp: { + bsonType: 'date', + description: 'When the sentiment was recorded' + }, + sentiment_score: { + bsonType: 'double', + minimum: -1.0, + maximum: 1.0, + description: 'Sentiment score between -1 (negative) and 1 (positive)' + }, + confidence: { + bsonType: 'double', + minimum: 0.0, + maximum: 1.0, + description: 'Confidence level of the sentiment analysis' + }, + text_snippet: { + bsonType: 'string', + description: 'Original text that was analyzed' + }, + metadata: { + bsonType: 'object', + description: 'Additional metadata about the sentiment source' + } + } 
+ } + } +}); + +// Raw Documents Collection (for news articles, social media posts, etc.) +db.createCollection('raw_documents', { + validator: { + $jsonSchema: { + bsonType: 'object', + required: ['source', 'document_type', 'timestamp', 'content'], + properties: { + source: { + bsonType: 'string', + description: 'Document source (news_api, twitter, reddit, etc.)' + }, + document_type: { + bsonType: 'string', + enum: ['news_article', 'social_post', 'earnings_transcript', 'research_report', 'press_release'], + description: 'Type of document' + }, + timestamp: { + bsonType: 'date', + description: 'When the document was created/published' + }, + symbols: { + bsonType: 'array', + items: { + bsonType: 'string' + }, + description: 'Array of stock symbols mentioned in the document' + }, + title: { + bsonType: 'string', + description: 'Document title or headline' + }, + content: { + bsonType: 'string', + description: 'Full document content' + }, + url: { + bsonType: 'string', + description: 'Original URL of the document' + }, + author: { + bsonType: 'string', + description: 'Document author or source account' + }, + processed: { + bsonType: 'bool', + description: 'Whether this document has been processed for sentiment' + }, + metadata: { + bsonType: 'object', + description: 'Additional document metadata' + } + } + } + } +}); + +// Market Events Collection (for significant market events and their impact) +db.createCollection('market_events', { + validator: { + $jsonSchema: { + bsonType: 'object', + required: ['event_type', 'timestamp', 'description'], + properties: { + event_type: { + bsonType: 'string', + enum: ['earnings', 'merger', 'acquisition', 'ipo', 'dividend', 'split', 'regulatory', 'economic_indicator'], + description: 'Type of market event' + }, + timestamp: { + bsonType: 'date', + description: 'When the event occurred or was announced' + }, + symbols: { + bsonType: 'array', + items: { + bsonType: 'string' + }, + description: 'Stock symbols affected by this event' 
+ }, + description: { + bsonType: 'string', + description: 'Event description' + }, + impact_score: { + bsonType: 'double', + minimum: -5.0, + maximum: 5.0, + description: 'Expected market impact score' + }, + source_documents: { + bsonType: 'array', + items: { + bsonType: 'objectId' + }, + description: 'References to raw_documents that reported this event' + } + } + } + } +}); + +// Create indexes for efficient querying + +// Sentiment Analysis indexes +db.sentiment_analysis.createIndex({ symbol: 1, timestamp: -1 }); +db.sentiment_analysis.createIndex({ source: 1, timestamp: -1 }); +db.sentiment_analysis.createIndex({ timestamp: -1 }); +db.sentiment_analysis.createIndex({ symbol: 1, source: 1, timestamp: -1 }); + +// Raw Documents indexes +db.raw_documents.createIndex({ symbols: 1, timestamp: -1 }); +db.raw_documents.createIndex({ source: 1, timestamp: -1 }); +db.raw_documents.createIndex({ document_type: 1, timestamp: -1 }); +db.raw_documents.createIndex({ processed: 1, timestamp: -1 }); +db.raw_documents.createIndex({ timestamp: -1 }); + +// Market Events indexes +db.market_events.createIndex({ symbols: 1, timestamp: -1 }); +db.market_events.createIndex({ event_type: 1, timestamp: -1 }); +db.market_events.createIndex({ timestamp: -1 }); + +// Insert some sample data for testing + +// Sample sentiment data +db.sentiment_analysis.insertMany([ + { + symbol: 'AAPL', + source: 'news_analysis', + timestamp: new Date(), + sentiment_score: 0.75, + confidence: 0.89, + text_snippet: 'Apple reports strong quarterly earnings...', + metadata: { + article_id: 'news_001', + provider: 'financial_news_api' + } + }, + { + symbol: 'GOOGL', + source: 'social_media', + timestamp: new Date(), + sentiment_score: -0.25, + confidence: 0.67, + text_snippet: 'Concerns about Google AI regulation...', + metadata: { + platform: 'twitter', + engagement_score: 450 + } + } +]); + +// Sample raw document +db.raw_documents.insertOne({ + source: 'financial_news_api', + document_type: 
'news_article', + timestamp: new Date(), + symbols: ['AAPL', 'MSFT'], + title: 'Tech Giants Show Strong Q4 Performance', + content: 'Apple and Microsoft both reported better than expected earnings for Q4...', + url: 'https://example.com/tech-earnings-q4', + author: 'Financial Reporter', + processed: true, + metadata: { + word_count: 850, + readability_score: 0.75 + } +}); + +// Sample market event +db.market_events.insertOne({ + event_type: 'earnings', + timestamp: new Date(), + symbols: ['AAPL'], + description: 'Apple Q4 2024 Earnings Report', + impact_score: 2.5, + source_documents: [] +}); + +print('MongoDB initialization completed successfully!'); +print('Created collections: sentiment_analysis, raw_documents, market_events'); +print('Created indexes for efficient querying'); +print('Inserted sample data for testing'); diff --git a/database/postgres/init/01-init-schemas.sql b/database/postgres/init/01-init-schemas.sql index 42821fb..912453b 100644 --- a/database/postgres/init/01-init-schemas.sql +++ b/database/postgres/init/01-init-schemas.sql @@ -1,20 +1,20 @@ --- Trading Bot Database Schema Initialization - --- Create schemas -CREATE SCHEMA IF NOT EXISTS trading; -CREATE SCHEMA IF NOT EXISTS strategy; -CREATE SCHEMA IF NOT EXISTS risk; -CREATE SCHEMA IF NOT EXISTS audit; - --- Set search path for the database -ALTER DATABASE trading_bot SET search_path TO trading, strategy, risk, audit, public; - --- Create extensions -CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; -CREATE EXTENSION IF NOT EXISTS "btree_gin"; -CREATE EXTENSION IF NOT EXISTS "pg_stat_statements"; - --- Create a read-only user for analytics -CREATE USER trading_reader WITH PASSWORD 'reader_pass_dev'; -GRANT CONNECT ON DATABASE trading_bot TO trading_reader; -GRANT USAGE ON SCHEMA trading, strategy, risk, audit TO trading_reader; +-- Trading Bot Database Schema Initialization + +-- Create schemas +CREATE SCHEMA IF NOT EXISTS trading; +CREATE SCHEMA IF NOT EXISTS strategy; +CREATE SCHEMA IF NOT 
EXISTS risk; +CREATE SCHEMA IF NOT EXISTS audit; + +-- Set search path for the database +ALTER DATABASE trading_bot SET search_path TO trading, strategy, risk, audit, public; + +-- Create extensions +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; +CREATE EXTENSION IF NOT EXISTS "btree_gin"; +CREATE EXTENSION IF NOT EXISTS "pg_stat_statements"; + +-- Create a read-only user for analytics +CREATE USER trading_reader WITH PASSWORD 'reader_pass_dev'; +GRANT CONNECT ON DATABASE trading_bot TO trading_reader; +GRANT USAGE ON SCHEMA trading, strategy, risk, audit TO trading_reader; diff --git a/database/postgres/init/02-trading-tables.sql b/database/postgres/init/02-trading-tables.sql index 41d18f3..64409c7 100644 --- a/database/postgres/init/02-trading-tables.sql +++ b/database/postgres/init/02-trading-tables.sql @@ -1,93 +1,93 @@ --- Core trading tables - --- Symbols and instruments -CREATE TABLE trading.symbols ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - symbol VARCHAR(20) NOT NULL UNIQUE, - name VARCHAR(255), - exchange VARCHAR(50), - asset_type VARCHAR(20) DEFAULT 'equity', - sector VARCHAR(100), - is_active BOOLEAN DEFAULT true, - metadata JSONB DEFAULT '{}', - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Orders -CREATE TABLE trading.orders ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - symbol_id UUID REFERENCES trading.symbols(id), - strategy_id UUID, - order_type VARCHAR(20) NOT NULL, -- 'market', 'limit', 'stop', etc. 
- side VARCHAR(10) NOT NULL CHECK (side IN ('buy', 'sell')), - quantity DECIMAL(18,8) NOT NULL CHECK (quantity > 0), - price DECIMAL(18,8), - stop_price DECIMAL(18,8), - status VARCHAR(20) DEFAULT 'pending' CHECK (status IN ('pending', 'submitted', 'filled', 'cancelled', 'rejected')), - broker_order_id VARCHAR(100), - filled_quantity DECIMAL(18,8) DEFAULT 0, - avg_fill_price DECIMAL(18,8), - commission DECIMAL(18,8) DEFAULT 0, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - CONSTRAINT valid_prices CHECK ( - (order_type = 'market') OR - (order_type = 'limit' AND price IS NOT NULL) OR - (order_type = 'stop' AND stop_price IS NOT NULL) - ) -); - --- Positions -CREATE TABLE trading.positions ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - symbol_id UUID REFERENCES trading.symbols(id), - strategy_id UUID, - quantity DECIMAL(18,8) NOT NULL, - avg_cost DECIMAL(18,8) NOT NULL, - market_value DECIMAL(18,8), - unrealized_pnl DECIMAL(18,8), - realized_pnl DECIMAL(18,8) DEFAULT 0, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - UNIQUE(symbol_id, strategy_id) -); - --- Executions/Fills -CREATE TABLE trading.executions ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - order_id UUID REFERENCES trading.orders(id), - symbol_id UUID REFERENCES trading.symbols(id), - side VARCHAR(10) NOT NULL CHECK (side IN ('buy', 'sell')), - quantity DECIMAL(18,8) NOT NULL, - price DECIMAL(18,8) NOT NULL, - commission DECIMAL(18,8) DEFAULT 0, - broker_execution_id VARCHAR(100), - executed_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Accounts/Portfolios -CREATE TABLE trading.accounts ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - name VARCHAR(255) NOT NULL, - account_type VARCHAR(50) DEFAULT 'paper', -- 'paper', 'live' - broker VARCHAR(50), - cash_balance DECIMAL(18,2) DEFAULT 0, - buying_power 
DECIMAL(18,2) DEFAULT 0, - total_value DECIMAL(18,2) DEFAULT 0, - is_active BOOLEAN DEFAULT true, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Create indexes for performance -CREATE INDEX idx_orders_symbol_created ON trading.orders(symbol_id, created_at); -CREATE INDEX idx_orders_status ON trading.orders(status); -CREATE INDEX idx_orders_strategy ON trading.orders(strategy_id); -CREATE INDEX idx_positions_strategy ON trading.positions(strategy_id); -CREATE INDEX idx_executions_order ON trading.executions(order_id); -CREATE INDEX idx_executions_symbol_time ON trading.executions(symbol_id, executed_at); - --- Grant permissions to reader -GRANT SELECT ON ALL TABLES IN SCHEMA trading TO trading_reader; +-- Core trading tables + +-- Symbols and instruments +CREATE TABLE trading.symbols ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + symbol VARCHAR(20) NOT NULL UNIQUE, + name VARCHAR(255), + exchange VARCHAR(50), + asset_type VARCHAR(20) DEFAULT 'equity', + sector VARCHAR(100), + is_active BOOLEAN DEFAULT true, + metadata JSONB DEFAULT '{}', + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Orders +CREATE TABLE trading.orders ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + symbol_id UUID REFERENCES trading.symbols(id), + strategy_id UUID, + order_type VARCHAR(20) NOT NULL, -- 'market', 'limit', 'stop', etc. 
+ side VARCHAR(10) NOT NULL CHECK (side IN ('buy', 'sell')), + quantity DECIMAL(18,8) NOT NULL CHECK (quantity > 0), + price DECIMAL(18,8), + stop_price DECIMAL(18,8), + status VARCHAR(20) DEFAULT 'pending' CHECK (status IN ('pending', 'submitted', 'filled', 'cancelled', 'rejected')), + broker_order_id VARCHAR(100), + filled_quantity DECIMAL(18,8) DEFAULT 0, + avg_fill_price DECIMAL(18,8), + commission DECIMAL(18,8) DEFAULT 0, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + CONSTRAINT valid_prices CHECK ( + (order_type = 'market') OR + (order_type = 'limit' AND price IS NOT NULL) OR + (order_type = 'stop' AND stop_price IS NOT NULL) + ) +); + +-- Positions +CREATE TABLE trading.positions ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + symbol_id UUID REFERENCES trading.symbols(id), + strategy_id UUID, + quantity DECIMAL(18,8) NOT NULL, + avg_cost DECIMAL(18,8) NOT NULL, + market_value DECIMAL(18,8), + unrealized_pnl DECIMAL(18,8), + realized_pnl DECIMAL(18,8) DEFAULT 0, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + UNIQUE(symbol_id, strategy_id) +); + +-- Executions/Fills +CREATE TABLE trading.executions ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + order_id UUID REFERENCES trading.orders(id), + symbol_id UUID REFERENCES trading.symbols(id), + side VARCHAR(10) NOT NULL CHECK (side IN ('buy', 'sell')), + quantity DECIMAL(18,8) NOT NULL, + price DECIMAL(18,8) NOT NULL, + commission DECIMAL(18,8) DEFAULT 0, + broker_execution_id VARCHAR(100), + executed_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Accounts/Portfolios +CREATE TABLE trading.accounts ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + name VARCHAR(255) NOT NULL, + account_type VARCHAR(50) DEFAULT 'paper', -- 'paper', 'live' + broker VARCHAR(50), + cash_balance DECIMAL(18,2) DEFAULT 0, + buying_power 
DECIMAL(18,2) DEFAULT 0, + total_value DECIMAL(18,2) DEFAULT 0, + is_active BOOLEAN DEFAULT true, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Create indexes for performance +CREATE INDEX idx_orders_symbol_created ON trading.orders(symbol_id, created_at); +CREATE INDEX idx_orders_status ON trading.orders(status); +CREATE INDEX idx_orders_strategy ON trading.orders(strategy_id); +CREATE INDEX idx_positions_strategy ON trading.positions(strategy_id); +CREATE INDEX idx_executions_order ON trading.executions(order_id); +CREATE INDEX idx_executions_symbol_time ON trading.executions(symbol_id, executed_at); + +-- Grant permissions to reader +GRANT SELECT ON ALL TABLES IN SCHEMA trading TO trading_reader; diff --git a/database/postgres/init/03-strategy-risk-tables.sql b/database/postgres/init/03-strategy-risk-tables.sql index 62b88d2..7824ec8 100644 --- a/database/postgres/init/03-strategy-risk-tables.sql +++ b/database/postgres/init/03-strategy-risk-tables.sql @@ -1,105 +1,105 @@ --- Strategy and Risk Management Tables - --- Strategies -CREATE TABLE strategy.strategies ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - name VARCHAR(255) NOT NULL, - description TEXT, - version VARCHAR(20) DEFAULT '1.0.0', - config JSONB DEFAULT '{}', - parameters JSONB DEFAULT '{}', - is_active BOOLEAN DEFAULT false, - is_enabled BOOLEAN DEFAULT true, - created_by VARCHAR(255), - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Strategy executions/runs -CREATE TABLE strategy.executions ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - strategy_id UUID REFERENCES strategy.strategies(id), - status VARCHAR(20) DEFAULT 'running' CHECK (status IN ('running', 'stopped', 'error', 'completed')), - started_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - stopped_at TIMESTAMP WITH TIME ZONE, - error_message TEXT, - execution_stats JSONB DEFAULT '{}', - created_at 
TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Strategy signals -CREATE TABLE strategy.signals ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - strategy_id UUID REFERENCES strategy.strategies(id), - symbol_id UUID REFERENCES trading.symbols(id), - signal_type VARCHAR(20) NOT NULL CHECK (signal_type IN ('buy', 'sell', 'hold')), - strength DECIMAL(3,2) CHECK (strength >= 0 AND strength <= 1), -- 0.0 to 1.0 - confidence DECIMAL(3,2) CHECK (confidence >= 0 AND confidence <= 1), - target_price DECIMAL(18,8), - stop_loss DECIMAL(18,8), - take_profit DECIMAL(18,8), - metadata JSONB DEFAULT '{}', - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Risk limits -CREATE TABLE risk.limits ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - strategy_id UUID REFERENCES strategy.strategies(id), - account_id UUID REFERENCES trading.accounts(id), - limit_type VARCHAR(50) NOT NULL, -- 'max_position_size', 'max_daily_loss', 'max_drawdown', etc. - limit_value DECIMAL(18,8) NOT NULL, - current_value DECIMAL(18,8) DEFAULT 0, - threshold_warning DECIMAL(18,8), -- Warning at X% of limit - is_active BOOLEAN DEFAULT true, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Risk events/alerts -CREATE TABLE risk.events ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - limit_id UUID REFERENCES risk.limits(id), - strategy_id UUID, - event_type VARCHAR(50) NOT NULL, -- 'warning', 'breach', 'resolved' - severity VARCHAR(20) DEFAULT 'medium' CHECK (severity IN ('low', 'medium', 'high', 'critical')), - message TEXT NOT NULL, - current_value DECIMAL(18,8), - limit_value DECIMAL(18,8), - metadata JSONB DEFAULT '{}', - acknowledged BOOLEAN DEFAULT false, - acknowledged_by VARCHAR(255), - acknowledged_at TIMESTAMP WITH TIME ZONE, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Performance tracking -CREATE TABLE strategy.performance ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - strategy_id UUID 
REFERENCES strategy.strategies(id), - date DATE NOT NULL, - total_return DECIMAL(10,4), - daily_return DECIMAL(10,4), - sharpe_ratio DECIMAL(10,4), - max_drawdown DECIMAL(10,4), - win_rate DECIMAL(5,4), - profit_factor DECIMAL(10,4), - total_trades INTEGER DEFAULT 0, - winning_trades INTEGER DEFAULT 0, - losing_trades INTEGER DEFAULT 0, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - UNIQUE(strategy_id, date) -); - --- Create indexes -CREATE INDEX idx_strategies_active ON strategy.strategies(is_active, is_enabled); -CREATE INDEX idx_executions_strategy ON strategy.executions(strategy_id); -CREATE INDEX idx_signals_strategy_time ON strategy.signals(strategy_id, created_at); -CREATE INDEX idx_signals_symbol ON strategy.signals(symbol_id); -CREATE INDEX idx_limits_strategy ON risk.limits(strategy_id); -CREATE INDEX idx_risk_events_severity ON risk.events(severity, created_at); -CREATE INDEX idx_performance_strategy_date ON strategy.performance(strategy_id, date); - --- Grant permissions -GRANT SELECT ON ALL TABLES IN SCHEMA strategy TO trading_reader; -GRANT SELECT ON ALL TABLES IN SCHEMA risk TO trading_reader; +-- Strategy and Risk Management Tables + +-- Strategies +CREATE TABLE strategy.strategies ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + name VARCHAR(255) NOT NULL, + description TEXT, + version VARCHAR(20) DEFAULT '1.0.0', + config JSONB DEFAULT '{}', + parameters JSONB DEFAULT '{}', + is_active BOOLEAN DEFAULT false, + is_enabled BOOLEAN DEFAULT true, + created_by VARCHAR(255), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Strategy executions/runs +CREATE TABLE strategy.executions ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + strategy_id UUID REFERENCES strategy.strategies(id), + status VARCHAR(20) DEFAULT 'running' CHECK (status IN ('running', 'stopped', 'error', 'completed')), + started_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + stopped_at TIMESTAMP WITH TIME 
ZONE, + error_message TEXT, + execution_stats JSONB DEFAULT '{}', + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Strategy signals +CREATE TABLE strategy.signals ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + strategy_id UUID REFERENCES strategy.strategies(id), + symbol_id UUID REFERENCES trading.symbols(id), + signal_type VARCHAR(20) NOT NULL CHECK (signal_type IN ('buy', 'sell', 'hold')), + strength DECIMAL(3,2) CHECK (strength >= 0 AND strength <= 1), -- 0.0 to 1.0 + confidence DECIMAL(3,2) CHECK (confidence >= 0 AND confidence <= 1), + target_price DECIMAL(18,8), + stop_loss DECIMAL(18,8), + take_profit DECIMAL(18,8), + metadata JSONB DEFAULT '{}', + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Risk limits +CREATE TABLE risk.limits ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + strategy_id UUID REFERENCES strategy.strategies(id), + account_id UUID REFERENCES trading.accounts(id), + limit_type VARCHAR(50) NOT NULL, -- 'max_position_size', 'max_daily_loss', 'max_drawdown', etc. 
+ limit_value DECIMAL(18,8) NOT NULL, + current_value DECIMAL(18,8) DEFAULT 0, + threshold_warning DECIMAL(18,8), -- Warning at X% of limit + is_active BOOLEAN DEFAULT true, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + updated_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Risk events/alerts +CREATE TABLE risk.events ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + limit_id UUID REFERENCES risk.limits(id), + strategy_id UUID, + event_type VARCHAR(50) NOT NULL, -- 'warning', 'breach', 'resolved' + severity VARCHAR(20) DEFAULT 'medium' CHECK (severity IN ('low', 'medium', 'high', 'critical')), + message TEXT NOT NULL, + current_value DECIMAL(18,8), + limit_value DECIMAL(18,8), + metadata JSONB DEFAULT '{}', + acknowledged BOOLEAN DEFAULT false, + acknowledged_by VARCHAR(255), + acknowledged_at TIMESTAMP WITH TIME ZONE, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Performance tracking +CREATE TABLE strategy.performance ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + strategy_id UUID REFERENCES strategy.strategies(id), + date DATE NOT NULL, + total_return DECIMAL(10,4), + daily_return DECIMAL(10,4), + sharpe_ratio DECIMAL(10,4), + max_drawdown DECIMAL(10,4), + win_rate DECIMAL(5,4), + profit_factor DECIMAL(10,4), + total_trades INTEGER DEFAULT 0, + winning_trades INTEGER DEFAULT 0, + losing_trades INTEGER DEFAULT 0, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + UNIQUE(strategy_id, date) +); + +-- Create indexes +CREATE INDEX idx_strategies_active ON strategy.strategies(is_active, is_enabled); +CREATE INDEX idx_executions_strategy ON strategy.executions(strategy_id); +CREATE INDEX idx_signals_strategy_time ON strategy.signals(strategy_id, created_at); +CREATE INDEX idx_signals_symbol ON strategy.signals(symbol_id); +CREATE INDEX idx_limits_strategy ON risk.limits(strategy_id); +CREATE INDEX idx_risk_events_severity ON risk.events(severity, created_at); +CREATE INDEX idx_performance_strategy_date ON 
strategy.performance(strategy_id, date); + +-- Grant permissions +GRANT SELECT ON ALL TABLES IN SCHEMA strategy TO trading_reader; +GRANT SELECT ON ALL TABLES IN SCHEMA risk TO trading_reader; diff --git a/database/postgres/init/04-audit-tables.sql b/database/postgres/init/04-audit-tables.sql index 3f1d073..1371520 100644 --- a/database/postgres/init/04-audit-tables.sql +++ b/database/postgres/init/04-audit-tables.sql @@ -1,59 +1,59 @@ --- Audit and System Tables - --- System events audit -CREATE TABLE audit.system_events ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - service_name VARCHAR(100) NOT NULL, - event_type VARCHAR(50) NOT NULL, - event_data JSONB DEFAULT '{}', - user_id VARCHAR(255), - ip_address INET, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Trading events audit -CREATE TABLE audit.trading_events ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - event_type VARCHAR(50) NOT NULL, -- 'order_created', 'order_filled', 'position_opened', etc. - entity_type VARCHAR(50) NOT NULL, -- 'order', 'position', 'execution' - entity_id UUID NOT NULL, - old_values JSONB, - new_values JSONB, - changed_by VARCHAR(255), - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Service health monitoring -CREATE TABLE audit.service_health ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - service_name VARCHAR(100) NOT NULL, - status VARCHAR(20) NOT NULL CHECK (status IN ('healthy', 'unhealthy', 'degraded')), - version VARCHAR(50), - uptime_seconds INTEGER, - memory_usage_mb INTEGER, - cpu_usage_percent DECIMAL(5,2), - last_health_check TIMESTAMP WITH TIME ZONE DEFAULT NOW(), - metadata JSONB DEFAULT '{}', - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() -); - --- Configuration changes -CREATE TABLE audit.config_changes ( - id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), - config_key VARCHAR(255) NOT NULL, - old_value TEXT, - new_value TEXT, - changed_by VARCHAR(255), - reason TEXT, - created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() 
-); - --- Create indexes -CREATE INDEX idx_system_events_service_time ON audit.system_events(service_name, created_at); -CREATE INDEX idx_trading_events_type_time ON audit.trading_events(event_type, created_at); -CREATE INDEX idx_trading_events_entity ON audit.trading_events(entity_type, entity_id); -CREATE INDEX idx_service_health_name_time ON audit.service_health(service_name, created_at); -CREATE INDEX idx_config_changes_key_time ON audit.config_changes(config_key, created_at); - --- Grant permissions -GRANT SELECT ON ALL TABLES IN SCHEMA audit TO trading_reader; +-- Audit and System Tables + +-- System events audit +CREATE TABLE audit.system_events ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + service_name VARCHAR(100) NOT NULL, + event_type VARCHAR(50) NOT NULL, + event_data JSONB DEFAULT '{}', + user_id VARCHAR(255), + ip_address INET, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Trading events audit +CREATE TABLE audit.trading_events ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + event_type VARCHAR(50) NOT NULL, -- 'order_created', 'order_filled', 'position_opened', etc. 
+ entity_type VARCHAR(50) NOT NULL, -- 'order', 'position', 'execution' + entity_id UUID NOT NULL, + old_values JSONB, + new_values JSONB, + changed_by VARCHAR(255), + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Service health monitoring +CREATE TABLE audit.service_health ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + service_name VARCHAR(100) NOT NULL, + status VARCHAR(20) NOT NULL CHECK (status IN ('healthy', 'unhealthy', 'degraded')), + version VARCHAR(50), + uptime_seconds INTEGER, + memory_usage_mb INTEGER, + cpu_usage_percent DECIMAL(5,2), + last_health_check TIMESTAMP WITH TIME ZONE DEFAULT NOW(), + metadata JSONB DEFAULT '{}', + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Configuration changes +CREATE TABLE audit.config_changes ( + id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), + config_key VARCHAR(255) NOT NULL, + old_value TEXT, + new_value TEXT, + changed_by VARCHAR(255), + reason TEXT, + created_at TIMESTAMP WITH TIME ZONE DEFAULT NOW() +); + +-- Create indexes +CREATE INDEX idx_system_events_service_time ON audit.system_events(service_name, created_at); +CREATE INDEX idx_trading_events_type_time ON audit.trading_events(event_type, created_at); +CREATE INDEX idx_trading_events_entity ON audit.trading_events(entity_type, entity_id); +CREATE INDEX idx_service_health_name_time ON audit.service_health(service_name, created_at); +CREATE INDEX idx_config_changes_key_time ON audit.config_changes(config_key, created_at); + +-- Grant permissions +GRANT SELECT ON ALL TABLES IN SCHEMA audit TO trading_reader; diff --git a/database/postgres/init/05-initial-data.sql b/database/postgres/init/05-initial-data.sql index 47bfc35..b2e342a 100644 --- a/database/postgres/init/05-initial-data.sql +++ b/database/postgres/init/05-initial-data.sql @@ -1,55 +1,55 @@ --- Insert initial reference data - --- Insert common symbols -INSERT INTO trading.symbols (symbol, name, exchange, asset_type, sector) VALUES -('AAPL', 'Apple Inc.', 'NASDAQ', 
'equity', 'Technology'), -('GOOGL', 'Alphabet Inc.', 'NASDAQ', 'equity', 'Technology'), -('MSFT', 'Microsoft Corporation', 'NASDAQ', 'equity', 'Technology'), -('AMZN', 'Amazon.com Inc.', 'NASDAQ', 'equity', 'Consumer Discretionary'), -('TSLA', 'Tesla Inc.', 'NASDAQ', 'equity', 'Consumer Discretionary'), -('NVDA', 'NVIDIA Corporation', 'NASDAQ', 'equity', 'Technology'), -('META', 'Meta Platforms Inc.', 'NASDAQ', 'equity', 'Technology'), -('NFLX', 'Netflix Inc.', 'NASDAQ', 'equity', 'Communication Services'), -('SPY', 'SPDR S&P 500 ETF Trust', 'NYSE', 'etf', 'Broad Market'), -('QQQ', 'Invesco QQQ Trust', 'NASDAQ', 'etf', 'Technology'), -('BTC-USD', 'Bitcoin USD', 'CRYPTO', 'cryptocurrency', 'Digital Assets'), -('ETH-USD', 'Ethereum USD', 'CRYPTO', 'cryptocurrency', 'Digital Assets'); - --- Insert default trading account -INSERT INTO trading.accounts (name, account_type, broker, cash_balance, buying_power, total_value) VALUES -('Demo Account', 'paper', 'demo', 100000.00, 100000.00, 100000.00); - --- Insert demo strategy -INSERT INTO strategy.strategies (name, description, config, parameters, is_active) VALUES -('Demo Mean Reversion', 'Simple mean reversion strategy for demonstration', - '{"timeframe": "1h", "lookback_period": 20}', - '{"rsi_oversold": 30, "rsi_overbought": 70, "position_size": 0.1}', - false); - --- Insert basic risk limits -INSERT INTO risk.limits (strategy_id, limit_type, limit_value, threshold_warning) -SELECT s.id, 'max_position_size', 10000.00, 8000.00 -FROM strategy.strategies s -WHERE s.name = 'Demo Mean Reversion'; - -INSERT INTO risk.limits (strategy_id, limit_type, limit_value, threshold_warning) -SELECT s.id, 'max_daily_loss', 5000.00, 4000.00 -FROM strategy.strategies s -WHERE s.name = 'Demo Mean Reversion'; - --- Create updated_at trigger function -CREATE OR REPLACE FUNCTION update_updated_at_column() -RETURNS TRIGGER AS $$ -BEGIN - NEW.updated_at = NOW(); - RETURN NEW; -END; -$$ language 'plpgsql'; - --- Apply updated_at triggers -CREATE 
TRIGGER update_symbols_updated_at BEFORE UPDATE ON trading.symbols FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -CREATE TRIGGER update_orders_updated_at BEFORE UPDATE ON trading.orders FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -CREATE TRIGGER update_positions_updated_at BEFORE UPDATE ON trading.positions FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -CREATE TRIGGER update_accounts_updated_at BEFORE UPDATE ON trading.accounts FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -CREATE TRIGGER update_strategies_updated_at BEFORE UPDATE ON strategy.strategies FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); -CREATE TRIGGER update_limits_updated_at BEFORE UPDATE ON risk.limits FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); +-- Insert initial reference data + +-- Insert common symbols +INSERT INTO trading.symbols (symbol, name, exchange, asset_type, sector) VALUES +('AAPL', 'Apple Inc.', 'NASDAQ', 'equity', 'Technology'), +('GOOGL', 'Alphabet Inc.', 'NASDAQ', 'equity', 'Technology'), +('MSFT', 'Microsoft Corporation', 'NASDAQ', 'equity', 'Technology'), +('AMZN', 'Amazon.com Inc.', 'NASDAQ', 'equity', 'Consumer Discretionary'), +('TSLA', 'Tesla Inc.', 'NASDAQ', 'equity', 'Consumer Discretionary'), +('NVDA', 'NVIDIA Corporation', 'NASDAQ', 'equity', 'Technology'), +('META', 'Meta Platforms Inc.', 'NASDAQ', 'equity', 'Technology'), +('NFLX', 'Netflix Inc.', 'NASDAQ', 'equity', 'Communication Services'), +('SPY', 'SPDR S&P 500 ETF Trust', 'NYSE', 'etf', 'Broad Market'), +('QQQ', 'Invesco QQQ Trust', 'NASDAQ', 'etf', 'Technology'), +('BTC-USD', 'Bitcoin USD', 'CRYPTO', 'cryptocurrency', 'Digital Assets'), +('ETH-USD', 'Ethereum USD', 'CRYPTO', 'cryptocurrency', 'Digital Assets'); + +-- Insert default trading account +INSERT INTO trading.accounts (name, account_type, broker, cash_balance, buying_power, total_value) VALUES +('Demo Account', 'paper', 'demo', 100000.00, 100000.00, 100000.00); + +-- Insert demo 
strategy +INSERT INTO strategy.strategies (name, description, config, parameters, is_active) VALUES +('Demo Mean Reversion', 'Simple mean reversion strategy for demonstration', + '{"timeframe": "1h", "lookback_period": 20}', + '{"rsi_oversold": 30, "rsi_overbought": 70, "position_size": 0.1}', + false); + +-- Insert basic risk limits +INSERT INTO risk.limits (strategy_id, limit_type, limit_value, threshold_warning) +SELECT s.id, 'max_position_size', 10000.00, 8000.00 +FROM strategy.strategies s +WHERE s.name = 'Demo Mean Reversion'; + +INSERT INTO risk.limits (strategy_id, limit_type, limit_value, threshold_warning) +SELECT s.id, 'max_daily_loss', 5000.00, 4000.00 +FROM strategy.strategies s +WHERE s.name = 'Demo Mean Reversion'; + +-- Create updated_at trigger function +CREATE OR REPLACE FUNCTION update_updated_at_column() +RETURNS TRIGGER AS $$ +BEGIN + NEW.updated_at = NOW(); + RETURN NEW; +END; +$$ language 'plpgsql'; + +-- Apply updated_at triggers +CREATE TRIGGER update_symbols_updated_at BEFORE UPDATE ON trading.symbols FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); +CREATE TRIGGER update_orders_updated_at BEFORE UPDATE ON trading.orders FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); +CREATE TRIGGER update_positions_updated_at BEFORE UPDATE ON trading.positions FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); +CREATE TRIGGER update_accounts_updated_at BEFORE UPDATE ON trading.accounts FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); +CREATE TRIGGER update_strategies_updated_at BEFORE UPDATE ON strategy.strategies FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); +CREATE TRIGGER update_limits_updated_at BEFORE UPDATE ON risk.limits FOR EACH ROW EXECUTE FUNCTION update_updated_at_column(); diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 3930443..0f8cd84 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,34 +1,34 @@ -# Development override for Docker Compose -# Use: 
docker-compose -f docker-compose.yml -f docker-compose.dev.yml up - -version: '3.8' - -services: - # Development overrides for faster feedback - dragonfly: - command: > - dragonfly - --logtostderr - --cache_mode=true - --maxmemory=512mb - --save_schedule="" - --bind=0.0.0.0 - ports: - - "6379:6379" - - postgres: - environment: - POSTGRES_DB: trading_bot_dev - POSTGRES_USER: dev_user - POSTGRES_PASSWORD: dev_pass - ports: - - "5432:5432" - - # Disable monitoring in development to save resources - prometheus: - profiles: - - monitoring - - grafana: - profiles: - - monitoring +# Development override for Docker Compose +# Use: docker-compose -f docker-compose.yml -f docker-compose.dev.yml up + +version: '3.8' + +services: + # Development overrides for faster feedback + dragonfly: + command: > + dragonfly + --logtostderr + --cache_mode=true + --maxmemory=512mb + --save_schedule="" + --bind=0.0.0.0 + ports: + - "6379:6379" + + postgres: + environment: + POSTGRES_DB: trading_bot_dev + POSTGRES_USER: dev_user + POSTGRES_PASSWORD: dev_pass + ports: + - "5432:5432" + + # Disable monitoring in development to save resources + prometheus: + profiles: + - monitoring + + grafana: + profiles: + - monitoring diff --git a/docker-compose.yml b/docker-compose.yml index eab572d..d7fd504 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,244 +1,244 @@ -services: - # Dragonfly - Redis replacement for caching and events - dragonfly: - image: docker.dragonflydb.io/dragonflydb/dragonfly:latest - container_name: trading-bot-dragonfly - ports: - - "6379:6379" - deploy: - resources: - limits: - cpus: '4.0' # Use up to 4 CPU cores - memory: 8G # Use up to 8GB RAM - reservations: - cpus: '2.0' # Reserve at least 2 CPU cores - memory: 4G # Reserve at least 4GB RAM - command: - - dragonfly - - --logtostderr - - --cache_mode=true - - --maxmemory=8gb - - --proactor_threads=8 - - --bind=0.0.0.0 - - --admin_port=6380 - - --proactor_threads=8 # Number of I/O threads (CPU cores) - - 
--cache_mode=true # Enable cache mode for better performance - - --cluster_mode=emulated # Better for single-node performance - - --hz=500 # Increase background task frequency - - --tcp_keepalive=60 - - --conn_use_incoming_cpu=true # Better CPU utilization - - --lock_on_hashtags - - --default_lua_flags=allow-undeclared-keys - volumes: - - dragonfly_data:/data - restart: unless-stopped - healthcheck: - test: ["CMD", "redis-cli", "ping"] - interval: 30s - timeout: 10s - retries: 3 - networks: - - trading-bot-network - - # PostgreSQL - Operational data (orders, positions, strategies) - postgres: - image: postgres:16-alpine - container_name: trading-bot-postgres - environment: - POSTGRES_DB: trading_bot - POSTGRES_USER: trading_user - POSTGRES_PASSWORD: trading_pass_dev - POSTGRES_INITDB_ARGS: "--encoding=UTF-8" - ports: - - "5432:5432" - volumes: - - postgres_data:/var/lib/postgresql/data - - ./database/postgres/init:/docker-entrypoint-initdb.d - restart: unless-stopped - healthcheck: - test: ["CMD-SHELL", "pg_isready -U trading_user -d trading_bot"] - interval: 30s - timeout: 10s - retries: 3 - networks: - - trading-bot-network - - # QuestDB - Time-series data (OHLCV, indicators, performance) - questdb: - image: questdb/questdb:latest - container_name: trading-bot-questdb - ports: - - "9000:9000" # Web console - - "8812:8812" # PostgreSQL wire protocol - - "9009:9009" # InfluxDB line protocol - volumes: - - questdb_data:/var/lib/questdb - environment: - - QDB_TELEMETRY_ENABLED=false - restart: unless-stopped - networks: - - trading-bot-network - - # MongoDB - Document storage (sentiment, raw docs, unstructured data) - mongodb: - image: mongo:7-jammy - container_name: trading-bot-mongodb - environment: - MONGO_INITDB_ROOT_USERNAME: trading_admin - MONGO_INITDB_ROOT_PASSWORD: trading_mongo_dev - MONGO_INITDB_DATABASE: trading_documents - ports: - - "27017:27017" - volumes: - - mongodb_data:/data/db - - ./database/mongodb/init:/docker-entrypoint-initdb.d - restart: 
unless-stopped - healthcheck: - test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"] - interval: 30s - timeout: 10s - retries: 3 - networks: - - trading-bot-network - - # Redis Insight - GUI for Dragonfly debugging - redis-insight: - image: redislabs/redisinsight:latest - container_name: trading-bot-redis-insight - ports: - - "8001:8001" - environment: - - REDIS_HOSTS=local:dragonfly:6379 - depends_on: - - dragonfly - restart: unless-stopped - networks: - - trading-bot-network - - # PgAdmin - PostgreSQL GUI - pgadmin: - image: dpage/pgadmin4:latest - container_name: trading-bot-pgadmin - environment: - PGADMIN_DEFAULT_EMAIL: admin@tradingbot.local - PGADMIN_DEFAULT_PASSWORD: admin123 - PGADMIN_CONFIG_SERVER_MODE: 'False' - PGADMIN_DISABLE_POSTFIX: 'true' - ports: - - "8080:80" - volumes: - - pgadmin_data:/var/lib/pgadmin - depends_on: - - postgres - restart: unless-stopped - networks: - - trading-bot-network - - # Mongo Express - MongoDB GUI - mongo-express: - image: mongo-express:latest - container_name: trading-bot-mongo-express - environment: - ME_CONFIG_MONGODB_ADMINUSERNAME: trading_admin - ME_CONFIG_MONGODB_ADMINPASSWORD: trading_mongo_dev - ME_CONFIG_MONGODB_SERVER: mongodb - ME_CONFIG_MONGODB_PORT: 27017 - ME_CONFIG_BASICAUTH_USERNAME: admin - ME_CONFIG_BASICAUTH_PASSWORD: admin123 - ports: - - "8081:8081" - depends_on: - - mongodb - restart: unless-stopped - networks: - - trading-bot-network - - # Prometheus - Metrics collection - prometheus: - image: prom/prometheus:latest - container_name: trading-bot-prometheus - ports: - - "9090:9090" - volumes: - - ./monitoring/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml - - prometheus_data:/prometheus - command: - - '--config.file=/etc/prometheus/prometheus.yml' - - '--storage.tsdb.path=/prometheus' - - '--web.console.libraries=/etc/prometheus/console_libraries' - - '--web.console.templates=/etc/prometheus/consoles' - - '--web.enable-lifecycle' - restart: unless-stopped - networks: - - 
trading-bot-network - - # Loki - Log aggregation - loki: - image: grafana/loki:2.9.2 - container_name: trading-bot-loki - ports: - - "3100:3100" - volumes: - - loki_data:/loki - - ./monitoring/loki:/etc/loki - command: -config.file=/etc/loki/loki-config.yaml - healthcheck: - test: ["CMD", "wget", "-q", "--spider", "http://localhost:3100/ready"] - interval: 30s - timeout: 10s - retries: 3 - restart: unless-stopped - networks: - - trading-bot-network - # Grafana - Visualization for logs and metrics - grafana: - image: grafana/grafana:10.2.0 - container_name: trading-bot-grafana - ports: - - "3000:3000" - environment: - - GF_SECURITY_ADMIN_PASSWORD=admin - - GF_SECURITY_ADMIN_USER=admin - - GF_PATHS_PROVISIONING=/etc/grafana/provisioning - - GF_USERS_ALLOW_SIGN_UP=false - volumes: - - grafana_data:/var/lib/grafana - - ./monitoring/grafana/provisioning:/etc/grafana/provisioning - depends_on: - - prometheus - - loki - restart: unless-stopped - networks: - - trading-bot-network - - # Bull Board - Queue monitoring - bull-board: - image: deadly0/bull-board - container_name: trading-bot-bull-board - ports: - - "3001:3000" - environment: - - REDIS_HOST=dragonfly - - REDIS_PORT=6379 - - REDIS_PASSWORD= - - REDIS_DB=0 - - REDIS_URL=redis://dragonfly:6379 - depends_on: - - dragonfly - restart: unless-stopped - networks: - - trading-bot-network - -volumes: - postgres_data: - questdb_data: - dragonfly_data: - mongodb_data: - pgadmin_data: - prometheus_data: - grafana_data: - loki_data: - -networks: - trading-bot-network: - driver: bridge +services: + # Dragonfly - Redis replacement for caching and events + dragonfly: + image: docker.dragonflydb.io/dragonflydb/dragonfly:latest + container_name: trading-bot-dragonfly + ports: + - "6379:6379" + deploy: + resources: + limits: + cpus: '4.0' # Use up to 4 CPU cores + memory: 8G # Use up to 8GB RAM + reservations: + cpus: '2.0' # Reserve at least 2 CPU cores + memory: 4G # Reserve at least 4GB RAM + command: + - dragonfly + - 
--logtostderr + - --cache_mode=true + - --maxmemory=8gb + - --proactor_threads=8 + - --bind=0.0.0.0 + - --admin_port=6380 + # (duplicate --proactor_threads removed; already set above) + # (duplicate --cache_mode removed; already set above) + - --cluster_mode=emulated # Better for single-node performance + - --hz=500 # Increase background task frequency + - --tcp_keepalive=60 + - --conn_use_incoming_cpu=true # Better CPU utilization + - --lock_on_hashtags + - --default_lua_flags=allow-undeclared-keys + volumes: + - dragonfly_data:/data + restart: unless-stopped + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 30s + timeout: 10s + retries: 3 + networks: + - trading-bot-network + + # PostgreSQL - Operational data (orders, positions, strategies) + postgres: + image: postgres:16-alpine + container_name: trading-bot-postgres + environment: + POSTGRES_DB: trading_bot + POSTGRES_USER: trading_user + POSTGRES_PASSWORD: trading_pass_dev + POSTGRES_INITDB_ARGS: "--encoding=UTF-8" + ports: + - "5432:5432" + volumes: + - postgres_data:/var/lib/postgresql/data + - ./database/postgres/init:/docker-entrypoint-initdb.d + restart: unless-stopped + healthcheck: + test: ["CMD-SHELL", "pg_isready -U trading_user -d trading_bot"] + interval: 30s + timeout: 10s + retries: 3 + networks: + - trading-bot-network + + # QuestDB - Time-series data (OHLCV, indicators, performance) + questdb: + image: questdb/questdb:latest + container_name: trading-bot-questdb + ports: + - "9000:9000" # Web console + - "8812:8812" # PostgreSQL wire protocol + - "9009:9009" # InfluxDB line protocol + volumes: + - questdb_data:/var/lib/questdb + environment: + - QDB_TELEMETRY_ENABLED=false + restart: unless-stopped + networks: + - trading-bot-network + + # MongoDB - Document storage (sentiment, raw docs, unstructured data) + mongodb: + image: mongo:7-jammy + container_name: trading-bot-mongodb + environment: + MONGO_INITDB_ROOT_USERNAME: trading_admin + MONGO_INITDB_ROOT_PASSWORD: 
trading_mongo_dev + MONGO_INITDB_DATABASE: trading_documents + ports: + - "27017:27017" + volumes: + - mongodb_data:/data/db + - ./database/mongodb/init:/docker-entrypoint-initdb.d + restart: unless-stopped + healthcheck: + test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"] + interval: 30s + timeout: 10s + retries: 3 + networks: + - trading-bot-network + + # Redis Insight - GUI for Dragonfly debugging + redis-insight: + image: redislabs/redisinsight:latest + container_name: trading-bot-redis-insight + ports: + - "8001:8001" + environment: + - REDIS_HOSTS=local:dragonfly:6379 + depends_on: + - dragonfly + restart: unless-stopped + networks: + - trading-bot-network + + # PgAdmin - PostgreSQL GUI + pgadmin: + image: dpage/pgadmin4:latest + container_name: trading-bot-pgadmin + environment: + PGADMIN_DEFAULT_EMAIL: admin@tradingbot.local + PGADMIN_DEFAULT_PASSWORD: admin123 + PGADMIN_CONFIG_SERVER_MODE: 'False' + PGADMIN_DISABLE_POSTFIX: 'true' + ports: + - "8080:80" + volumes: + - pgadmin_data:/var/lib/pgadmin + depends_on: + - postgres + restart: unless-stopped + networks: + - trading-bot-network + + # Mongo Express - MongoDB GUI + mongo-express: + image: mongo-express:latest + container_name: trading-bot-mongo-express + environment: + ME_CONFIG_MONGODB_ADMINUSERNAME: trading_admin + ME_CONFIG_MONGODB_ADMINPASSWORD: trading_mongo_dev + ME_CONFIG_MONGODB_SERVER: mongodb + ME_CONFIG_MONGODB_PORT: 27017 + ME_CONFIG_BASICAUTH_USERNAME: admin + ME_CONFIG_BASICAUTH_PASSWORD: admin123 + ports: + - "8081:8081" + depends_on: + - mongodb + restart: unless-stopped + networks: + - trading-bot-network + + # Prometheus - Metrics collection + prometheus: + image: prom/prometheus:latest + container_name: trading-bot-prometheus + ports: + - "9090:9090" + volumes: + - ./monitoring/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml + - prometheus_data:/prometheus + command: + - '--config.file=/etc/prometheus/prometheus.yml' + - '--storage.tsdb.path=/prometheus' + - 
'--web.console.libraries=/etc/prometheus/console_libraries' + - '--web.console.templates=/etc/prometheus/consoles' + - '--web.enable-lifecycle' + restart: unless-stopped + networks: + - trading-bot-network + + # Loki - Log aggregation + loki: + image: grafana/loki:2.9.2 + container_name: trading-bot-loki + ports: + - "3100:3100" + volumes: + - loki_data:/loki + - ./monitoring/loki:/etc/loki + command: -config.file=/etc/loki/loki-config.yaml + healthcheck: + test: ["CMD", "wget", "-q", "--spider", "http://localhost:3100/ready"] + interval: 30s + timeout: 10s + retries: 3 + restart: unless-stopped + networks: + - trading-bot-network + # Grafana - Visualization for logs and metrics + grafana: + image: grafana/grafana:10.2.0 + container_name: trading-bot-grafana + ports: + - "3000:3000" + environment: + - GF_SECURITY_ADMIN_PASSWORD=admin + - GF_SECURITY_ADMIN_USER=admin + - GF_PATHS_PROVISIONING=/etc/grafana/provisioning + - GF_USERS_ALLOW_SIGN_UP=false + volumes: + - grafana_data:/var/lib/grafana + - ./monitoring/grafana/provisioning:/etc/grafana/provisioning + depends_on: + - prometheus + - loki + restart: unless-stopped + networks: + - trading-bot-network + + # Bull Board - Queue monitoring + bull-board: + image: deadly0/bull-board + container_name: trading-bot-bull-board + ports: + - "3001:3000" + environment: + - REDIS_HOST=dragonfly + - REDIS_PORT=6379 + - REDIS_PASSWORD= + - REDIS_DB=0 + - REDIS_URL=redis://dragonfly:6379 + depends_on: + - dragonfly + restart: unless-stopped + networks: + - trading-bot-network + +volumes: + postgres_data: + questdb_data: + dragonfly_data: + mongodb_data: + pgadmin_data: + prometheus_data: + grafana_data: + loki_data: + +networks: + trading-bot-network: + driver: bridge diff --git a/docs/cache-library-usage.md b/docs/cache-library-usage.md index d4c36fa..7a0c684 100644 --- a/docs/cache-library-usage.md +++ b/docs/cache-library-usage.md @@ -1,532 +1,532 @@ -# Cache Library Usage Guide - -The `@stock-bot/cache` library provides a 
powerful, flexible caching solution designed specifically for trading bot applications. It supports multiple cache providers including Redis/Dragonfly, in-memory caching, and hybrid caching strategies. - -## Table of Contents - -1. [Installation](#installation) -2. [Quick Start](#quick-start) -3. [Cache Providers](#cache-providers) -4. [Factory Functions](#factory-functions) -5. [Cache Decorators](#cache-decorators) -6. [Trading-Specific Usage](#trading-specific-usage) -7. [Configuration](#configuration) -8. [Monitoring & Metrics](#monitoring--metrics) -9. [Error Handling](#error-handling) -10. [Best Practices](#best-practices) - -## Installation - -The cache library is already included in the monorepo. To use it in your service: - -```json -{ - "dependencies": { - "@stock-bot/cache": "*" - } -} -``` - -## Quick Start - -### Basic Usage - -```typescript -import { createCache } from '@stock-bot/cache'; - -// Auto-detect best cache type (hybrid if Redis available, otherwise memory) -const cache = createCache('auto'); - -// Basic operations -await cache.set('user:123', { name: 'John', balance: 1000 }, 3600); -const user = await cache.get<{ name: string; balance: number }>('user:123'); -await cache.delete('user:123'); -``` - -### Trading-Optimized Cache - -```typescript -import { createTradingCache } from '@stock-bot/cache'; - -const cache = createTradingCache({ - keyPrefix: 'trading:', - ttl: 300, // 5 minutes default - enableMetrics: true -}); - -// Cache market data -await cache.set('market:AAPL:price', { price: 150.25, timestamp: Date.now() }); -``` - -## Cache Providers - -### 1. Redis Cache (Dragonfly) - -Uses Redis/Dragonfly for distributed caching with persistence and high performance. - -```typescript -import { RedisCache } from '@stock-bot/cache'; - -const redisCache = new RedisCache({ - keyPrefix: 'app:', - ttl: 3600, - enableMetrics: true -}); - -// Automatic connection to Dragonfly using config -await redisCache.set('key', 'value'); -``` - -### 2. 
Memory Cache - -In-memory caching with LRU eviction and TTL support. - -```typescript -import { MemoryCache } from '@stock-bot/cache'; - -const memoryCache = new MemoryCache({ - maxSize: 1000, // Maximum 1000 entries - ttl: 300, // 5 minutes default TTL - cleanupInterval: 60 // Cleanup every minute -}); -``` - -### 3. Hybrid Cache - -Two-tier caching combining fast memory cache (L1) with persistent Redis cache (L2). - -```typescript -import { HybridCache } from '@stock-bot/cache'; - -const hybridCache = new HybridCache({ - memoryTTL: 60, // L1 cache TTL: 1 minute - redisTTL: 3600, // L2 cache TTL: 1 hour - memoryMaxSize: 500 // L1 cache max entries -}); - -// Data flows: Memory -> Redis -> Database -const data = await hybridCache.get('expensive:calculation'); -``` - -## Factory Functions - -### createCache() - -General-purpose cache factory with auto-detection. - -```typescript -import { createCache } from '@stock-bot/cache'; - -// Auto-detect (recommended) -const cache = createCache('auto'); - -// Specific provider -const redisCache = createCache('redis', { ttl: 1800 }); -const memoryCache = createCache('memory', { maxSize: 2000 }); -const hybridCache = createCache('hybrid'); -``` - -### createTradingCache() - -Optimized for trading operations with sensible defaults. - -```typescript -import { createTradingCache } from '@stock-bot/cache'; - -const tradingCache = createTradingCache({ - keyPrefix: 'trading:', - ttl: 300, // 5 minutes - good for price data - enableMetrics: true -}); -``` - -### createMarketDataCache() - -Specialized for market data with short TTLs. - -```typescript -import { createMarketDataCache } from '@stock-bot/cache'; - -const marketCache = createMarketDataCache({ - priceDataTTL: 30, // 30 seconds for price data - indicatorDataTTL: 300, // 5 minutes for indicators - newsDataTTL: 1800 // 30 minutes for news -}); -``` - -### createStrategyCache() - -For strategy computations and backtesting results. 
- -```typescript -import { createStrategyCache } from '@stock-bot/cache'; - -const strategyCache = createStrategyCache({ - backtestTTL: 86400, // 24 hours for backtest results - signalTTL: 300, // 5 minutes for signals - optimizationTTL: 3600 // 1 hour for optimization results -}); -``` - -## Cache Decorators - -### @Cacheable - -Automatically cache method results. - -```typescript -import { Cacheable } from '@stock-bot/cache'; - -class MarketDataService { - @Cacheable({ - keyGenerator: (symbol: string) => `price:${symbol}`, - ttl: 60 - }) - async getPrice(symbol: string): Promise { - // Expensive API call - return await this.fetchPriceFromAPI(symbol); - } -} -``` - -### @CacheEvict - -Invalidate cache entries when data changes. - -```typescript -import { CacheEvict } from '@stock-bot/cache'; - -class PortfolioService { - @CacheEvict({ - keyPattern: 'portfolio:*' - }) - async updatePosition(symbol: string, quantity: number): Promise { - // Update database - await this.savePosition(symbol, quantity); - // Cache automatically invalidated - } -} -``` - -### @CachePut - -Always execute method and update cache. 
- -```typescript -import { CachePut } from '@stock-bot/cache'; - -class StrategyService { - @CachePut({ - keyGenerator: (strategyId: string) => `strategy:${strategyId}:result` - }) - async runStrategy(strategyId: string): Promise { - const result = await this.executeStrategy(strategyId); - // Result always cached after execution - return result; - } -} -``` - -## Trading-Specific Usage - -### Market Data Caching - -```typescript -import { createMarketDataCache, CacheKeyGenerator } from '@stock-bot/cache'; - -const marketCache = createMarketDataCache(); -const keyGen = new CacheKeyGenerator(); - -// Cache price data -const priceKey = keyGen.priceKey('AAPL'); -await marketCache.set(priceKey, { price: 150.25, volume: 1000000 }, 30); - -// Cache technical indicators -const smaKey = keyGen.indicatorKey('AAPL', 'SMA', { period: 20 }); -await marketCache.set(smaKey, 148.50, 300); - -// Cache order book -const orderBookKey = keyGen.orderBookKey('AAPL'); -await marketCache.set(orderBookKey, orderBookData, 5); -``` - -### Strategy Result Caching - -```typescript -import { createStrategyCache, CacheKeyGenerator } from '@stock-bot/cache'; - -const strategyCache = createStrategyCache(); -const keyGen = new CacheKeyGenerator(); - -// Cache backtest results -const backtestKey = keyGen.backtestKey('momentum-strategy', { - startDate: '2024-01-01', - endDate: '2024-12-31', - symbol: 'AAPL' -}); -await strategyCache.set(backtestKey, backtestResults, 86400); - -// Cache trading signals -const signalKey = keyGen.signalKey('AAPL', 'momentum-strategy'); -await strategyCache.set(signalKey, { action: 'BUY', confidence: 0.85 }, 300); -``` - -### Portfolio Data Caching - -```typescript -import { createTradingCache, CacheKeyGenerator } from '@stock-bot/cache'; - -const portfolioCache = createTradingCache(); -const keyGen = new CacheKeyGenerator(); - -// Cache portfolio positions -const positionsKey = keyGen.portfolioKey('user123', 'positions'); -await portfolioCache.set(positionsKey, 
positions, 300); - -// Cache risk metrics -const riskKey = keyGen.riskKey('user123', 'VaR'); -await portfolioCache.set(riskKey, { var95: 1250.50 }, 3600); -``` - -## Configuration - -Cache configuration is handled through the `@stock-bot/config` package. Key settings: - -```typescript -// Dragonfly/Redis configuration -DRAGONFLY_HOST=localhost -DRAGONFLY_PORT=6379 -DRAGONFLY_PASSWORD=your_password -DRAGONFLY_DATABASE=0 -DRAGONFLY_MAX_RETRIES=3 -DRAGONFLY_RETRY_DELAY=100 -DRAGONFLY_CONNECT_TIMEOUT=10000 -DRAGONFLY_COMMAND_TIMEOUT=5000 - -// TLS settings (optional) -DRAGONFLY_TLS=true -DRAGONFLY_TLS_CERT_FILE=/path/to/cert.pem -DRAGONFLY_TLS_KEY_FILE=/path/to/key.pem -DRAGONFLY_TLS_CA_FILE=/path/to/ca.pem -``` - -## Monitoring & Metrics - -### Cache Statistics - -```typescript -const cache = createTradingCache({ enableMetrics: true }); - -// Get cache statistics -const stats = await cache.getStats(); -console.log(`Hit rate: ${stats.hitRate}%`); -console.log(`Total operations: ${stats.total}`); -console.log(`Uptime: ${stats.uptime} seconds`); -``` - -### Health Checks - -```typescript -const cache = createCache('hybrid'); - -// Check cache health -const isHealthy = await cache.isHealthy(); -if (!isHealthy) { - console.error('Cache is not healthy'); -} - -// Monitor connection status -cache.on('connect', () => console.log('Cache connected')); -cache.on('disconnect', () => console.error('Cache disconnected')); -cache.on('error', (error) => console.error('Cache error:', error)); -``` - -### Metrics Integration - -```typescript -// Export metrics to Prometheus/Grafana -const metrics = await cache.getStats(); - -// Custom metrics tracking -await cache.set('key', 'value', 300, { - tags: { service: 'trading-bot', operation: 'price-update' } -}); -``` - -## Error Handling - -The cache library implements graceful error handling: - -### Automatic Failover - -```typescript -// Hybrid cache automatically falls back to memory if Redis fails -const hybridCache = 
createCache('hybrid'); - -// If Redis is down, data is served from memory cache -const data = await hybridCache.get('key'); // Never throws, returns null if not found -``` - -### Circuit Breaker Pattern - -```typescript -const cache = createTradingCache({ - maxConsecutiveFailures: 5, // Open circuit after 5 failures - circuitBreakerTimeout: 30000 // Try again after 30 seconds -}); - -try { - await cache.set('key', 'value'); -} catch (error) { - // Handle cache unavailability - console.warn('Cache unavailable, falling back to direct data access'); -} -``` - -### Error Events - -```typescript -cache.on('error', (error) => { - if (error.code === 'CONNECTION_LOST') { - // Handle connection loss - await cache.reconnect(); - } -}); -``` - -## Best Practices - -### 1. Choose the Right Cache Type - -- **Memory Cache**: Fast access, limited by RAM, good for frequently accessed small data -- **Redis Cache**: Persistent, distributed, good for shared data across services -- **Hybrid Cache**: Best of both worlds, use for hot data with fallback - -### 2. Set Appropriate TTLs - -```typescript -// Trading data TTL guidelines -const TTL = { - PRICE_DATA: 30, // 30 seconds - very volatile - INDICATORS: 300, // 5 minutes - calculated values - NEWS: 1800, // 30 minutes - slower changing - BACKTEST_RESULTS: 86400, // 24 hours - expensive calculations - USER_PREFERENCES: 3600 // 1 hour - rarely change during session -}; -``` - -### 3. Use Proper Key Naming - -```typescript -// Good key naming convention -const keyGen = new CacheKeyGenerator(); -const key = keyGen.priceKey('AAPL'); // trading:price:AAPL:2024-01-01 - -// Avoid generic keys -// Bad: "data", "result", "temp" -// Good: "trading:price:AAPL", "strategy:momentum:signals" -``` - -### 4. 
Implement Cache Warming - -```typescript -// Pre-populate cache with frequently accessed data -async function warmupCache() { - const symbols = ['AAPL', 'GOOGL', 'MSFT']; - const cache = createMarketDataCache(); - - for (const symbol of symbols) { - const price = await fetchPrice(symbol); - await cache.set(keyGen.priceKey(symbol), price, 300); - } -} -``` - -### 5. Monitor Cache Performance - -```typescript -// Regular performance monitoring -setInterval(async () => { - const stats = await cache.getStats(); - if (stats.hitRate < 80) { - console.warn('Low cache hit rate:', stats.hitRate); - } -}, 60000); // Check every minute -``` - -### 6. Handle Cache Invalidation - -```typescript -// Invalidate related cache entries when data changes -class PositionService { - async updatePosition(symbol: string, quantity: number) { - await this.saveToDatabase(symbol, quantity); - - // Invalidate related cache entries - await cache.delete(`portfolio:positions`); - await cache.delete(`portfolio:risk:*`); - await cache.delete(`strategy:signals:${symbol}`); - } -} -``` - -## Advanced Examples - -### Custom Cache Provider - -```typescript -class DatabaseCache implements CacheProvider { - async get(key: string): Promise { - // Implement database-backed cache - } - - async set(key: string, value: T, ttl?: number): Promise { - // Store in database with expiration - } - - // ... 
implement other methods -} - -// Use with factory -const dbCache = new DatabaseCache(); -``` - -### Batch Operations - -```typescript -// Efficient batch operations -const keys = ['price:AAPL', 'price:GOOGL', 'price:MSFT']; -const values = await cache.mget(keys); - -const updates = new Map([ - ['price:AAPL', 150.25], - ['price:GOOGL', 2800.50], - ['price:MSFT', 350.75] -]); -await cache.mset(updates, 300); -``` - -### Conditional Caching - -```typescript -class SmartCache { - async getOrCompute( - key: string, - computeFn: () => Promise, - shouldCache: (value: T) => boolean = () => true - ): Promise { - let value = await this.cache.get(key); - - if (value === null) { - value = await computeFn(); - if (shouldCache(value)) { - await this.cache.set(key, value, this.defaultTTL); - } - } - - return value; - } -} -``` - -This cache library provides enterprise-grade caching capabilities specifically designed for trading bot applications, with built-in monitoring, error handling, and performance optimization. +# Cache Library Usage Guide + +The `@stock-bot/cache` library provides a powerful, flexible caching solution designed specifically for trading bot applications. It supports multiple cache providers including Redis/Dragonfly, in-memory caching, and hybrid caching strategies. + +## Table of Contents + +1. [Installation](#installation) +2. [Quick Start](#quick-start) +3. [Cache Providers](#cache-providers) +4. [Factory Functions](#factory-functions) +5. [Cache Decorators](#cache-decorators) +6. [Trading-Specific Usage](#trading-specific-usage) +7. [Configuration](#configuration) +8. [Monitoring & Metrics](#monitoring--metrics) +9. [Error Handling](#error-handling) +10. [Best Practices](#best-practices) + +## Installation + +The cache library is already included in the monorepo. 
To use it in your service: + +```json +{ + "dependencies": { + "@stock-bot/cache": "*" + } +} +``` + +## Quick Start + +### Basic Usage + +```typescript +import { createCache } from '@stock-bot/cache'; + +// Auto-detect best cache type (hybrid if Redis available, otherwise memory) +const cache = createCache('auto'); + +// Basic operations +await cache.set('user:123', { name: 'John', balance: 1000 }, 3600); +const user = await cache.get<{ name: string; balance: number }>('user:123'); +await cache.delete('user:123'); +``` + +### Trading-Optimized Cache + +```typescript +import { createTradingCache } from '@stock-bot/cache'; + +const cache = createTradingCache({ + keyPrefix: 'trading:', + ttl: 300, // 5 minutes default + enableMetrics: true +}); + +// Cache market data +await cache.set('market:AAPL:price', { price: 150.25, timestamp: Date.now() }); +``` + +## Cache Providers + +### 1. Redis Cache (Dragonfly) + +Uses Redis/Dragonfly for distributed caching with persistence and high performance. + +```typescript +import { RedisCache } from '@stock-bot/cache'; + +const redisCache = new RedisCache({ + keyPrefix: 'app:', + ttl: 3600, + enableMetrics: true +}); + +// Automatic connection to Dragonfly using config +await redisCache.set('key', 'value'); +``` + +### 2. Memory Cache + +In-memory caching with LRU eviction and TTL support. + +```typescript +import { MemoryCache } from '@stock-bot/cache'; + +const memoryCache = new MemoryCache({ + maxSize: 1000, // Maximum 1000 entries + ttl: 300, // 5 minutes default TTL + cleanupInterval: 60 // Cleanup every minute +}); +``` + +### 3. Hybrid Cache + +Two-tier caching combining fast memory cache (L1) with persistent Redis cache (L2). 
+ +```typescript +import { HybridCache } from '@stock-bot/cache'; + +const hybridCache = new HybridCache({ + memoryTTL: 60, // L1 cache TTL: 1 minute + redisTTL: 3600, // L2 cache TTL: 1 hour + memoryMaxSize: 500 // L1 cache max entries +}); + +// Data flows: Memory -> Redis -> Database +const data = await hybridCache.get('expensive:calculation'); +``` + +## Factory Functions + +### createCache() + +General-purpose cache factory with auto-detection. + +```typescript +import { createCache } from '@stock-bot/cache'; + +// Auto-detect (recommended) +const cache = createCache('auto'); + +// Specific provider +const redisCache = createCache('redis', { ttl: 1800 }); +const memoryCache = createCache('memory', { maxSize: 2000 }); +const hybridCache = createCache('hybrid'); +``` + +### createTradingCache() + +Optimized for trading operations with sensible defaults. + +```typescript +import { createTradingCache } from '@stock-bot/cache'; + +const tradingCache = createTradingCache({ + keyPrefix: 'trading:', + ttl: 300, // 5 minutes - good for price data + enableMetrics: true +}); +``` + +### createMarketDataCache() + +Specialized for market data with short TTLs. + +```typescript +import { createMarketDataCache } from '@stock-bot/cache'; + +const marketCache = createMarketDataCache({ + priceDataTTL: 30, // 30 seconds for price data + indicatorDataTTL: 300, // 5 minutes for indicators + newsDataTTL: 1800 // 30 minutes for news +}); +``` + +### createStrategyCache() + +For strategy computations and backtesting results. + +```typescript +import { createStrategyCache } from '@stock-bot/cache'; + +const strategyCache = createStrategyCache({ + backtestTTL: 86400, // 24 hours for backtest results + signalTTL: 300, // 5 minutes for signals + optimizationTTL: 3600 // 1 hour for optimization results +}); +``` + +## Cache Decorators + +### @Cacheable + +Automatically cache method results. 
+ +```typescript +import { Cacheable } from '@stock-bot/cache'; + +class MarketDataService { + @Cacheable({ + keyGenerator: (symbol: string) => `price:${symbol}`, + ttl: 60 + }) + async getPrice(symbol: string): Promise { + // Expensive API call + return await this.fetchPriceFromAPI(symbol); + } +} +``` + +### @CacheEvict + +Invalidate cache entries when data changes. + +```typescript +import { CacheEvict } from '@stock-bot/cache'; + +class PortfolioService { + @CacheEvict({ + keyPattern: 'portfolio:*' + }) + async updatePosition(symbol: string, quantity: number): Promise { + // Update database + await this.savePosition(symbol, quantity); + // Cache automatically invalidated + } +} +``` + +### @CachePut + +Always execute method and update cache. + +```typescript +import { CachePut } from '@stock-bot/cache'; + +class StrategyService { + @CachePut({ + keyGenerator: (strategyId: string) => `strategy:${strategyId}:result` + }) + async runStrategy(strategyId: string): Promise { + const result = await this.executeStrategy(strategyId); + // Result always cached after execution + return result; + } +} +``` + +## Trading-Specific Usage + +### Market Data Caching + +```typescript +import { createMarketDataCache, CacheKeyGenerator } from '@stock-bot/cache'; + +const marketCache = createMarketDataCache(); +const keyGen = new CacheKeyGenerator(); + +// Cache price data +const priceKey = keyGen.priceKey('AAPL'); +await marketCache.set(priceKey, { price: 150.25, volume: 1000000 }, 30); + +// Cache technical indicators +const smaKey = keyGen.indicatorKey('AAPL', 'SMA', { period: 20 }); +await marketCache.set(smaKey, 148.50, 300); + +// Cache order book +const orderBookKey = keyGen.orderBookKey('AAPL'); +await marketCache.set(orderBookKey, orderBookData, 5); +``` + +### Strategy Result Caching + +```typescript +import { createStrategyCache, CacheKeyGenerator } from '@stock-bot/cache'; + +const strategyCache = createStrategyCache(); +const keyGen = new CacheKeyGenerator(); + +// 
Cache backtest results +const backtestKey = keyGen.backtestKey('momentum-strategy', { + startDate: '2024-01-01', + endDate: '2024-12-31', + symbol: 'AAPL' +}); +await strategyCache.set(backtestKey, backtestResults, 86400); + +// Cache trading signals +const signalKey = keyGen.signalKey('AAPL', 'momentum-strategy'); +await strategyCache.set(signalKey, { action: 'BUY', confidence: 0.85 }, 300); +``` + +### Portfolio Data Caching + +```typescript +import { createTradingCache, CacheKeyGenerator } from '@stock-bot/cache'; + +const portfolioCache = createTradingCache(); +const keyGen = new CacheKeyGenerator(); + +// Cache portfolio positions +const positionsKey = keyGen.portfolioKey('user123', 'positions'); +await portfolioCache.set(positionsKey, positions, 300); + +// Cache risk metrics +const riskKey = keyGen.riskKey('user123', 'VaR'); +await portfolioCache.set(riskKey, { var95: 1250.50 }, 3600); +``` + +## Configuration + +Cache configuration is handled through the `@stock-bot/config` package. 
Key settings: + +```typescript +// Dragonfly/Redis configuration +DRAGONFLY_HOST=localhost +DRAGONFLY_PORT=6379 +DRAGONFLY_PASSWORD=your_password +DRAGONFLY_DATABASE=0 +DRAGONFLY_MAX_RETRIES=3 +DRAGONFLY_RETRY_DELAY=100 +DRAGONFLY_CONNECT_TIMEOUT=10000 +DRAGONFLY_COMMAND_TIMEOUT=5000 + +// TLS settings (optional) +DRAGONFLY_TLS=true +DRAGONFLY_TLS_CERT_FILE=/path/to/cert.pem +DRAGONFLY_TLS_KEY_FILE=/path/to/key.pem +DRAGONFLY_TLS_CA_FILE=/path/to/ca.pem +``` + +## Monitoring & Metrics + +### Cache Statistics + +```typescript +const cache = createTradingCache({ enableMetrics: true }); + +// Get cache statistics +const stats = await cache.getStats(); +console.log(`Hit rate: ${stats.hitRate}%`); +console.log(`Total operations: ${stats.total}`); +console.log(`Uptime: ${stats.uptime} seconds`); +``` + +### Health Checks + +```typescript +const cache = createCache('hybrid'); + +// Check cache health +const isHealthy = await cache.isHealthy(); +if (!isHealthy) { + console.error('Cache is not healthy'); +} + +// Monitor connection status +cache.on('connect', () => console.log('Cache connected')); +cache.on('disconnect', () => console.error('Cache disconnected')); +cache.on('error', (error) => console.error('Cache error:', error)); +``` + +### Metrics Integration + +```typescript +// Export metrics to Prometheus/Grafana +const metrics = await cache.getStats(); + +// Custom metrics tracking +await cache.set('key', 'value', 300, { + tags: { service: 'trading-bot', operation: 'price-update' } +}); +``` + +## Error Handling + +The cache library implements graceful error handling: + +### Automatic Failover + +```typescript +// Hybrid cache automatically falls back to memory if Redis fails +const hybridCache = createCache('hybrid'); + +// If Redis is down, data is served from memory cache +const data = await hybridCache.get('key'); // Never throws, returns null if not found +``` + +### Circuit Breaker Pattern + +```typescript +const cache = createTradingCache({ + 
maxConsecutiveFailures: 5, // Open circuit after 5 failures + circuitBreakerTimeout: 30000 // Try again after 30 seconds +}); + +try { + await cache.set('key', 'value'); +} catch (error) { + // Handle cache unavailability + console.warn('Cache unavailable, falling back to direct data access'); +} +``` + +### Error Events + +```typescript +cache.on('error', (error) => { + if (error.code === 'CONNECTION_LOST') { + // Handle connection loss + await cache.reconnect(); + } +}); +``` + +## Best Practices + +### 1. Choose the Right Cache Type + +- **Memory Cache**: Fast access, limited by RAM, good for frequently accessed small data +- **Redis Cache**: Persistent, distributed, good for shared data across services +- **Hybrid Cache**: Best of both worlds, use for hot data with fallback + +### 2. Set Appropriate TTLs + +```typescript +// Trading data TTL guidelines +const TTL = { + PRICE_DATA: 30, // 30 seconds - very volatile + INDICATORS: 300, // 5 minutes - calculated values + NEWS: 1800, // 30 minutes - slower changing + BACKTEST_RESULTS: 86400, // 24 hours - expensive calculations + USER_PREFERENCES: 3600 // 1 hour - rarely change during session +}; +``` + +### 3. Use Proper Key Naming + +```typescript +// Good key naming convention +const keyGen = new CacheKeyGenerator(); +const key = keyGen.priceKey('AAPL'); // trading:price:AAPL:2024-01-01 + +// Avoid generic keys +// Bad: "data", "result", "temp" +// Good: "trading:price:AAPL", "strategy:momentum:signals" +``` + +### 4. Implement Cache Warming + +```typescript +// Pre-populate cache with frequently accessed data +async function warmupCache() { + const symbols = ['AAPL', 'GOOGL', 'MSFT']; + const cache = createMarketDataCache(); + + for (const symbol of symbols) { + const price = await fetchPrice(symbol); + await cache.set(keyGen.priceKey(symbol), price, 300); + } +} +``` + +### 5. 
Monitor Cache Performance + +```typescript +// Regular performance monitoring +setInterval(async () => { + const stats = await cache.getStats(); + if (stats.hitRate < 80) { + console.warn('Low cache hit rate:', stats.hitRate); + } +}, 60000); // Check every minute +``` + +### 6. Handle Cache Invalidation + +```typescript +// Invalidate related cache entries when data changes +class PositionService { + async updatePosition(symbol: string, quantity: number) { + await this.saveToDatabase(symbol, quantity); + + // Invalidate related cache entries + await cache.delete(`portfolio:positions`); + await cache.delete(`portfolio:risk:*`); + await cache.delete(`strategy:signals:${symbol}`); + } +} +``` + +## Advanced Examples + +### Custom Cache Provider + +```typescript +class DatabaseCache implements CacheProvider { + async get(key: string): Promise { + // Implement database-backed cache + } + + async set(key: string, value: T, ttl?: number): Promise { + // Store in database with expiration + } + + // ... 
implement other methods +} + +// Use with factory +const dbCache = new DatabaseCache(); +``` + +### Batch Operations + +```typescript +// Efficient batch operations +const keys = ['price:AAPL', 'price:GOOGL', 'price:MSFT']; +const values = await cache.mget(keys); + +const updates = new Map([ + ['price:AAPL', 150.25], + ['price:GOOGL', 2800.50], + ['price:MSFT', 350.75] +]); +await cache.mset(updates, 300); +``` + +### Conditional Caching + +```typescript +class SmartCache { + async getOrCompute( + key: string, + computeFn: () => Promise, + shouldCache: (value: T) => boolean = () => true + ): Promise { + let value = await this.cache.get(key); + + if (value === null) { + value = await computeFn(); + if (shouldCache(value)) { + await this.cache.set(key, value, this.defaultTTL); + } + } + + return value; + } +} +``` + +This cache library provides enterprise-grade caching capabilities specifically designed for trading bot applications, with built-in monitoring, error handling, and performance optimization. diff --git a/docs/loki-logging.md b/docs/loki-logging.md index fe8018f..5ea7241 100644 --- a/docs/loki-logging.md +++ b/docs/loki-logging.md @@ -1,169 +1,169 @@ -# Loki Logging for Stock Bot - -This document outlines how to use the Loki logging system integrated with the Stock Bot platform (Updated June 2025). - -## Overview - -Loki provides centralized logging for all Stock Bot services with: - -1. **Centralized logging** for all microservices -2. **Log aggregation** and filtering by service, level, and custom labels -3. **Grafana integration** for visualization and dashboards -4. **Query capabilities** using LogQL for log analysis -5. 
**Alert capabilities** for critical issues - -## Getting Started - -### Starting the Logging Stack - -```cmd -# Start the monitoring stack (includes Loki and Grafana) -scripts\docker.ps1 monitoring -``` - -Or start services individually: - -```cmd -# Start Loki service only -docker-compose up -d loki - -# Start Loki and Grafana -docker-compose up -d loki grafana -``` - -### Viewing Logs - -Once started: - -1. Access Grafana at http://localhost:3000 (login with admin/admin) -2. Navigate to the "Stock Bot Logs" dashboard -3. View and query your logs - -## Using the Logger in Your Services - -The Stock Bot logger automatically sends logs to Loki using the updated pattern: - -```typescript -import { getLogger } from '@stock-bot/logger'; - -// Create a logger for your service -const logger = getLogger('your-service-name'); - -// Log at different levels -logger.debug('Detailed information for debugging'); -logger.info('General information about operations'); -logger.warn('Potential issues that don\'t affect operation'); -logger.error('Critical errors that require attention'); - -// Log with structured data (searchable in Loki) -logger.info('Processing trade', { - symbol: 'MSFT', - price: 410.75, - quantity: 50 -}); -``` - -## Configuration Options - -Logger configuration is managed through the `@stock-bot/config` package and can be set in your `.env` file: - -```bash -# Logging configuration -LOG_LEVEL=debug # debug, info, warn, error -LOG_CONSOLE=true # Log to console in addition to Loki -LOKI_HOST=localhost # Loki server hostname -LOKI_PORT=3100 # Loki server port -LOKI_RETENTION_DAYS=30 # Days to retain logs -LOKI_LABELS=environment=development,service=stock-bot # Default labels -LOKI_BATCH_SIZE=100 # Number of logs to batch before sending -LOKI_BATCH_WAIT=5 # Max time to wait before sending logs -``` - -## Useful Loki Queries - -Inside Grafana, you can use these LogQL queries to analyze your logs: - -1. 
**All logs from a specific service**: - ``` - {service="market-data-gateway"} - ``` - -2. **All error logs across all services**: - ``` - {level="error"} - ``` - -3. **Logs containing specific text**: - ``` - {service="market-data-gateway"} |= "trade" - ``` - -4. **Count of error logs by service over time**: - ``` - sum by(service) (count_over_time({level="error"}[5m])) - ``` - -## Testing the Logging Integration - -Test the logging integration using Bun: - -```cmd -# Run from project root using Bun (current runtime) -bun run tools/test-loki-logging.ts -``` - -## Architecture - -Our logging implementation follows this architecture: - -``` -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Trading Services│────►│ @stock-bot/loggerβ”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ getLogger() β”‚ - β””β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”˜ - β”‚ - β–Ό -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Loki β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ - β”‚ - β–Ό -β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” -β”‚ Grafana β”‚ -β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ -``` - -## Adding New Dashboards - -To create new Grafana dashboards for log visualization: - -1. Build your dashboard in the Grafana UI -2. Export it to JSON -3. Add it to `monitoring/grafana/provisioning/dashboards/json/` -4. Restart the monitoring stack - -## Troubleshooting - -If logs aren't appearing in Grafana: - -1. Run the status check script to verify Loki and Grafana are working: - ```cmd - tools\check-loki-status.bat - ``` - -2. 
Check that Loki and Grafana containers are running: - ```cmd - docker ps | findstr "loki grafana" - ``` - -3. Verify .env configuration for Loki host and port: - ```cmd - type .env | findstr "LOKI_" - ``` - -4. Ensure your service has the latest @stock-bot/logger package - -5. Check for errors in the Loki container logs: - ```cmd - docker logs stock-bot-loki - ``` +# Loki Logging for Stock Bot + +This document outlines how to use the Loki logging system integrated with the Stock Bot platform (Updated June 2025). + +## Overview + +Loki provides centralized logging for all Stock Bot services with: + +1. **Centralized logging** for all microservices +2. **Log aggregation** and filtering by service, level, and custom labels +3. **Grafana integration** for visualization and dashboards +4. **Query capabilities** using LogQL for log analysis +5. **Alert capabilities** for critical issues + +## Getting Started + +### Starting the Logging Stack + +```cmd +# Start the monitoring stack (includes Loki and Grafana) +scripts\docker.ps1 monitoring +``` + +Or start services individually: + +```cmd +# Start Loki service only +docker-compose up -d loki + +# Start Loki and Grafana +docker-compose up -d loki grafana +``` + +### Viewing Logs + +Once started: + +1. Access Grafana at http://localhost:3000 (login with admin/admin) +2. Navigate to the "Stock Bot Logs" dashboard +3. 
View and query your logs + +## Using the Logger in Your Services + +The Stock Bot logger automatically sends logs to Loki using the updated pattern: + +```typescript +import { getLogger } from '@stock-bot/logger'; + +// Create a logger for your service +const logger = getLogger('your-service-name'); + +// Log at different levels +logger.debug('Detailed information for debugging'); +logger.info('General information about operations'); +logger.warn('Potential issues that don\'t affect operation'); +logger.error('Critical errors that require attention'); + +// Log with structured data (searchable in Loki) +logger.info('Processing trade', { + symbol: 'MSFT', + price: 410.75, + quantity: 50 +}); +``` + +## Configuration Options + +Logger configuration is managed through the `@stock-bot/config` package and can be set in your `.env` file: + +```bash +# Logging configuration +LOG_LEVEL=debug # debug, info, warn, error +LOG_CONSOLE=true # Log to console in addition to Loki +LOKI_HOST=localhost # Loki server hostname +LOKI_PORT=3100 # Loki server port +LOKI_RETENTION_DAYS=30 # Days to retain logs +LOKI_LABELS=environment=development,service=stock-bot # Default labels +LOKI_BATCH_SIZE=100 # Number of logs to batch before sending +LOKI_BATCH_WAIT=5 # Max time to wait before sending logs +``` + +## Useful Loki Queries + +Inside Grafana, you can use these LogQL queries to analyze your logs: + +1. **All logs from a specific service**: + ``` + {service="market-data-gateway"} + ``` + +2. **All error logs across all services**: + ``` + {level="error"} + ``` + +3. **Logs containing specific text**: + ``` + {service="market-data-gateway"} |= "trade" + ``` + +4. 
**Count of error logs by service over time**: + ``` + sum by(service) (count_over_time({level="error"}[5m])) + ``` + +## Testing the Logging Integration + +Test the logging integration using Bun: + +```cmd +# Run from project root using Bun (current runtime) +bun run tools/test-loki-logging.ts +``` + +## Architecture + +Our logging implementation follows this architecture: + +``` +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Trading Services│────►│ @stock-bot/loggerβ”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ β”‚ getLogger() β”‚ + β””β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Loki β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”¬β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ + β”‚ + β–Ό +β”Œβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β” +β”‚ Grafana β”‚ +β””β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”˜ +``` + +## Adding New Dashboards + +To create new Grafana dashboards for log visualization: + +1. Build your dashboard in the Grafana UI +2. Export it to JSON +3. Add it to `monitoring/grafana/provisioning/dashboards/json/` +4. Restart the monitoring stack + +## Troubleshooting + +If logs aren't appearing in Grafana: + +1. Run the status check script to verify Loki and Grafana are working: + ```cmd + tools\check-loki-status.bat + ``` + +2. Check that Loki and Grafana containers are running: + ```cmd + docker ps | findstr "loki grafana" + ``` + +3. Verify .env configuration for Loki host and port: + ```cmd + type .env | findstr "LOKI_" + ``` + +4. Ensure your service has the latest @stock-bot/logger package + +5. 
Check for errors in the Loki container logs: + ```cmd + docker logs stock-bot-loki + ``` diff --git a/docs/testing-with-bun.md b/docs/testing-with-bun.md index 586db8f..5ae0064 100644 --- a/docs/testing-with-bun.md +++ b/docs/testing-with-bun.md @@ -1,65 +1,65 @@ -# Testing with Bun in Stock Bot Platform - -The Stock Bot platform uses [Bun Test](https://bun.sh/docs/cli/test) as the primary testing framework (Updated June 2025). Bun Test provides fast, modern testing with Jest-like API compatibility. - -## Getting Started - -Run tests using these commands: - -```cmd -# Run all tests (using Turbo) -bun test - -# Run tests in watch mode -bun test:watch - -# Run tests with coverage -bun test:coverage - -# Run specific test types -bun test:unit -bun test:integration -bun test:e2e -``` - -## Library-specific Testing - -Each library has its own testing configuration in a `bunfig.toml` file. This allows for library-specific test settings while maintaining consistent patterns across the codebase. - -### Example bunfig.toml: - -```toml -[test] -preload = ["./test/setup.ts"] -timeout = 5000 - -[test.env] -NODE_ENV = "test" - -[bun] -paths = { - "@/*" = ["./src/*"] -} -``` - -## Migration from Jest - -This project has been fully migrated from Jest to Bun Test. Some key differences: - -1. **Import statements**: Use `import { describe, it, expect } from 'bun:test'` instead of Jest imports -2. **Mocking**: Use Bun's built-in mocking utilities (see global `spyOn` helper) -3. **Configuration**: Use `bunfig.toml` instead of Jest config files -4. 
**Test helpers**: Test helpers are available globally via `global.testHelpers` - -## Best Practices - -- Use `describe` and `it` for test organization -- Use relative imports (`../src/`) in test files -- Keep test setup clean with proper `beforeEach` and `afterEach` handlers -- For complex test scenarios, create dedicated setup files - -## Test Environment - -- All tests run with `NODE_ENV=test` -- Console output is silenced by default (restore with `testHelpers.restoreConsole()`) -- Default timeout is 30 seconds for integration tests, 5 seconds for unit tests +# Testing with Bun in Stock Bot Platform + +The Stock Bot platform uses [Bun Test](https://bun.sh/docs/cli/test) as the primary testing framework (Updated June 2025). Bun Test provides fast, modern testing with Jest-like API compatibility. + +## Getting Started + +Run tests using these commands: + +```cmd +# Run all tests (using Turbo) +bun test + +# Run tests in watch mode +bun test:watch + +# Run tests with coverage +bun test:coverage + +# Run specific test types +bun test:unit +bun test:integration +bun test:e2e +``` + +## Library-specific Testing + +Each library has its own testing configuration in a `bunfig.toml` file. This allows for library-specific test settings while maintaining consistent patterns across the codebase. + +### Example bunfig.toml: + +```toml +[test] +preload = ["./test/setup.ts"] +timeout = 5000 + +[test.env] +NODE_ENV = "test" + +[bun] +paths = { + "@/*" = ["./src/*"] +} +``` + +## Migration from Jest + +This project has been fully migrated from Jest to Bun Test. Some key differences: + +1. **Import statements**: Use `import { describe, it, expect } from 'bun:test'` instead of Jest imports +2. **Mocking**: Use Bun's built-in mocking utilities (see global `spyOn` helper) +3. **Configuration**: Use `bunfig.toml` instead of Jest config files +4. 
**Test helpers**: Test helpers are available globally via `global.testHelpers` + +## Best Practices + +- Use `describe` and `it` for test organization +- Use relative imports (`../src/`) in test files +- Keep test setup clean with proper `beforeEach` and `afterEach` handlers +- For complex test scenarios, create dedicated setup files + +## Test Environment + +- All tests run with `NODE_ENV=test` +- Console output is silenced by default (restore with `testHelpers.restoreConsole()`) +- Default timeout is 30 seconds for integration tests, 5 seconds for unit tests diff --git a/docs/typescript-configuration.md b/docs/typescript-configuration.md index 8d97fa3..0d88271 100644 --- a/docs/typescript-configuration.md +++ b/docs/typescript-configuration.md @@ -1,118 +1,118 @@ -# TypeScript Configuration Structure - -This document explains the TypeScript configuration structure used in the Stock Bot trading platform. - -## Root Configuration - -The root `tsconfig.json` at the project root establishes common settings for all projects in the monorepo: - -```json -{ - "$schema": "https://json.schemastore.org/tsconfig", - "compilerOptions": { - "target": "ES2022", - "module": "NodeNext", - "moduleResolution": "NodeNext", - "strict": true, - "noImplicitAny": true, - "strictNullChecks": true, - "noImplicitThis": true, - "alwaysStrict": true, - "esModuleInterop": true, - "allowSyntheticDefaultImports": true, - "skipLibCheck": true, - "forceConsistentCasingInFileNames": true, - "resolveJsonModule": true, - "sourceMap": false, - "declaration": true, - "baseUrl": ".", - "paths": { - "@stock-bot/*": ["libs/*/src"] - } - }, - "exclude": [ - "node_modules", - "dist" - ] -} -``` - -## Template Configurations - -We provide two template configurations: - -1. 
`tsconfig.lib.json` - For library projects: -```json -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src", - "declaration": true - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"] -} -``` - -2. `tsconfig.app.json` - For application projects: -```json -{ - "extends": "../../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src", - "module": "ESNext", - "moduleResolution": "bundler", - "types": ["bun-types"] - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist"] -} -``` - -## Project-Specific Configurations - -Each project in the monorepo extends from the root configuration and adds its own specific settings: - -### Library Projects - -Library projects extend the root configuration with a relative path: -```json -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src", - "declaration": true - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"] -} -``` - -### Application Projects - -Application projects also extend the root configuration with a relative path: -```json -{ - "extends": "../../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src", - "module": "ESNext", - "moduleResolution": "bundler", - "types": ["bun-types"] - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist"] -} -``` - -## Special Configurations - -Some projects have special needs: - -1. **Trading Dashboard (Angular)**: Uses an extended configuration structure with separate files for app and testing. - -2. **Projects with TypeScript imports from extensions**: These projects set `"allowImportingTsExtensions": true` and `"noEmit": true`. +# TypeScript Configuration Structure + +This document explains the TypeScript configuration structure used in the Stock Bot trading platform. 
+ +## Root Configuration + +The root `tsconfig.json` at the project root establishes common settings for all projects in the monorepo: + +```json +{ + "$schema": "https://json.schemastore.org/tsconfig", + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "strict": true, + "noImplicitAny": true, + "strictNullChecks": true, + "noImplicitThis": true, + "alwaysStrict": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "sourceMap": false, + "declaration": true, + "baseUrl": ".", + "paths": { + "@stock-bot/*": ["libs/*/src"] + } + }, + "exclude": [ + "node_modules", + "dist" + ] +} +``` + +## Template Configurations + +We provide two template configurations: + +1. `tsconfig.lib.json` - For library projects: +```json +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src", + "declaration": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"] +} +``` + +2. 
`tsconfig.app.json` - For application projects: +```json +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src", + "module": "ESNext", + "moduleResolution": "bundler", + "types": ["bun-types"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +} +``` + +## Project-Specific Configurations + +Each project in the monorepo extends from the root configuration and adds its own specific settings: + +### Library Projects + +Library projects extend the root configuration with a relative path: +```json +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src", + "declaration": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"] +} +``` + +### Application Projects + +Application projects also extend the root configuration with a relative path: +```json +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src", + "module": "ESNext", + "moduleResolution": "bundler", + "types": ["bun-types"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +} +``` + +## Special Configurations + +Some projects have special needs: + +1. **Trading Dashboard (Angular)**: Uses an extended configuration structure with separate files for app and testing. + +2. **Projects with TypeScript imports from extensions**: These projects set `"allowImportingTsExtensions": true` and `"noEmit": true`. 
diff --git a/libs/cache/package.json b/libs/cache/package.json index 681819e..99a4db4 100644 --- a/libs/cache/package.json +++ b/libs/cache/package.json @@ -1,32 +1,32 @@ -{ - "name": "@stock-bot/cache", - "version": "1.0.0", - "description": "Caching library for Redis and in-memory providers", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "clean": "rimraf dist", - "test": "bun test" - }, - "dependencies": { - "ioredis": "^5.3.2" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "typescript": "^5.3.0", - "bun-types": "^1.2.15" - }, - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ] -} +{ + "name": "@stock-bot/cache", + "version": "1.0.0", + "description": "Caching library for Redis and in-memory providers", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsc", + "clean": "rimraf dist", + "test": "bun test" + }, + "dependencies": { + "ioredis": "^5.3.2" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "typescript": "^5.3.0", + "bun-types": "^1.2.15" + }, + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist", + "README.md" + ] +} diff --git a/libs/cache/src/decorators/cacheable.ts b/libs/cache/src/decorators/cacheable.ts index 3256088..00c05eb 100644 --- a/libs/cache/src/decorators/cacheable.ts +++ b/libs/cache/src/decorators/cacheable.ts @@ -1,265 +1,265 @@ -import { getLogger } from '@stock-bot/logger'; -import { CacheProvider } from '../types'; -import { CacheKeyGenerator } from '../key-generator'; - -const logger = getLogger('cache-decorator'); - -/** - * Method decorator for automatic caching - */ -export function Cacheable( - cacheProvider: CacheProvider, - options: { - keyGenerator?: (args: any[], target?: any, 
methodName?: string) => string; - ttl?: number; - skipFirstArg?: boolean; // Skip 'this' if it's the first argument - } = {} -) { - return function (target: any, propertyName: string, descriptor: PropertyDescriptor) { - const originalMethod = descriptor.value; - - descriptor.value = async function (...args: any[]) { - try { - // Generate cache key - const key = options.keyGenerator - ? options.keyGenerator(args, target, propertyName) - : generateDefaultKey(target.constructor.name, propertyName, args); - - // Try to get from cache - const cached = await cacheProvider.get(key); - if (cached !== null) { - logger.debug('Method cache hit', { - class: target.constructor.name, - method: propertyName, - key - }); - return cached; - } - - // Execute method and cache result - const result = await originalMethod.apply(this, args); - await cacheProvider.set(key, result, options.ttl); - - logger.debug('Method executed and cached', { - class: target.constructor.name, - method: propertyName, - key - }); - - return result; - } catch (error) { - logger.error('Cache decorator error', { - class: target.constructor.name, - method: propertyName, - error: error instanceof Error ? error.message : String(error) - }); - - // Fallback to original method if caching fails - return await originalMethod.apply(this, args); - } - }; - }; -} - -/** - * Cache invalidation decorator - */ -export function CacheEvict( - cacheProvider: CacheProvider, - options: { - keyGenerator?: (args: any[], target?: any, methodName?: string) => string | string[]; - evictBefore?: boolean; // Evict before method execution - } = {} -) { - return function (target: any, propertyName: string, descriptor: PropertyDescriptor) { - const originalMethod = descriptor.value; - - descriptor.value = async function (...args: any[]) { - try { - const keys = options.keyGenerator - ? 
options.keyGenerator(args, target, propertyName) - : generateDefaultKey(target.constructor.name, propertyName, args); - - const keysArray = Array.isArray(keys) ? keys : [keys]; - - if (options.evictBefore) { - // Evict before method execution - for (const key of keysArray) { - await cacheProvider.del(key); - } - logger.debug('Cache evicted before method execution', { - class: target.constructor.name, - method: propertyName, - keys: keysArray - }); - } - - // Execute method - const result = await originalMethod.apply(this, args); - - if (!options.evictBefore) { - // Evict after method execution - for (const key of keysArray) { - await cacheProvider.del(key); - } - logger.debug('Cache evicted after method execution', { - class: target.constructor.name, - method: propertyName, - keys: keysArray - }); - } - - return result; - } catch (error) { - logger.error('Cache evict decorator error', { - class: target.constructor.name, - method: propertyName, - error: error instanceof Error ? error.message : String(error) - }); - - // Continue with original method execution even if eviction fails - return await originalMethod.apply(this, args); - } - }; - }; -} - -/** - * Cache warming decorator - pre-populate cache with method results - */ -export function CacheWarm( - cacheProvider: CacheProvider, - options: { - keyGenerator?: (args: any[], target?: any, methodName?: string) => string; - ttl?: number; - warmupArgs: any[][]; // Array of argument arrays to warm up - } -) { - return function (target: any, propertyName: string, descriptor: PropertyDescriptor) { - const originalMethod = descriptor.value; - - // Warmup cache when method is first accessed - let warmed = false; - - descriptor.value = async function (...args: any[]) { - // Perform warmup if not done yet - if (!warmed) { - warmed = true; - setImmediate(async () => { - try { - for (const warmupArgs of options.warmupArgs) { - const key = options.keyGenerator - ? 
options.keyGenerator(warmupArgs, target, propertyName) - : generateDefaultKey(target.constructor.name, propertyName, warmupArgs); - - // Check if already cached - const exists = await cacheProvider.exists(key); - if (!exists) { - const result = await originalMethod.apply(this, warmupArgs); - await cacheProvider.set(key, result, options.ttl); - } - } - logger.info('Cache warmed up', { - class: target.constructor.name, - method: propertyName, - count: options.warmupArgs.length - }); - } catch (error) { - logger.error('Cache warmup failed', { - class: target.constructor.name, - method: propertyName, - error - }); - } - }); - } - - // Execute normal cacheable logic - const key = options.keyGenerator - ? options.keyGenerator(args, target, propertyName) - : generateDefaultKey(target.constructor.name, propertyName, args); - - const cached = await cacheProvider.get(key); - if (cached !== null) { - return cached; - } - - const result = await originalMethod.apply(this, args); - await cacheProvider.set(key, result, options.ttl); - - return result; - }; - }; -} - -/** - * Trading-specific decorators - */ - -/** - * Cache market data with appropriate TTL - */ -export function CacheMarketData( - cacheProvider: CacheProvider, - ttl: number = 300 // 5 minutes default -) { - return Cacheable(cacheProvider, { - keyGenerator: (args) => { - const [symbol, timeframe, date] = args; - return CacheKeyGenerator.marketData(symbol, timeframe, date); - }, - ttl - }); -} - -/** - * Cache technical indicators - */ -export function CacheIndicator( - cacheProvider: CacheProvider, - ttl: number = 600 // 10 minutes default -) { - return Cacheable(cacheProvider, { - keyGenerator: (args) => { - const [symbol, indicator, period, data] = args; - const dataHash = hashArray(data); - return CacheKeyGenerator.indicator(symbol, indicator, period, dataHash); - }, - ttl - }); -} - -/** - * Cache strategy results - */ -export function CacheStrategy( - cacheProvider: CacheProvider, - ttl: number = 1800 // 30 
minutes default -) { - return Cacheable(cacheProvider, { - keyGenerator: (args) => { - const [strategyName, symbol, timeframe] = args; - return CacheKeyGenerator.strategy(strategyName, symbol, timeframe); - }, - ttl - }); -} - -/** - * Helper functions - */ -function generateDefaultKey(className: string, methodName: string, args: any[]): string { - const argsHash = hashArray(args); - return `method:${className}:${methodName}:${argsHash}`; -} - -function hashArray(arr: any[]): string { - const str = JSON.stringify(arr); - let hash = 0; - for (let i = 0; i < str.length; i++) { - const char = str.charCodeAt(i); - hash = ((hash << 5) - hash) + char; - hash = hash & hash; // Convert to 32-bit integer - } - return Math.abs(hash).toString(36); -} +import { getLogger } from '@stock-bot/logger'; +import { CacheProvider } from '../types'; +import { CacheKeyGenerator } from '../key-generator'; + +const logger = getLogger('cache-decorator'); + +/** + * Method decorator for automatic caching + */ +export function Cacheable( + cacheProvider: CacheProvider, + options: { + keyGenerator?: (args: any[], target?: any, methodName?: string) => string; + ttl?: number; + skipFirstArg?: boolean; // Skip 'this' if it's the first argument + } = {} +) { + return function (target: any, propertyName: string, descriptor: PropertyDescriptor) { + const originalMethod = descriptor.value; + + descriptor.value = async function (...args: any[]) { + try { + // Generate cache key + const key = options.keyGenerator + ? 
options.keyGenerator(args, target, propertyName) + : generateDefaultKey(target.constructor.name, propertyName, args); + + // Try to get from cache + const cached = await cacheProvider.get(key); + if (cached !== null) { + logger.debug('Method cache hit', { + class: target.constructor.name, + method: propertyName, + key + }); + return cached; + } + + // Execute method and cache result + const result = await originalMethod.apply(this, args); + await cacheProvider.set(key, result, options.ttl); + + logger.debug('Method executed and cached', { + class: target.constructor.name, + method: propertyName, + key + }); + + return result; + } catch (error) { + logger.error('Cache decorator error', { + class: target.constructor.name, + method: propertyName, + error: error instanceof Error ? error.message : String(error) + }); + + // Fallback to original method if caching fails + return await originalMethod.apply(this, args); + } + }; + }; +} + +/** + * Cache invalidation decorator + */ +export function CacheEvict( + cacheProvider: CacheProvider, + options: { + keyGenerator?: (args: any[], target?: any, methodName?: string) => string | string[]; + evictBefore?: boolean; // Evict before method execution + } = {} +) { + return function (target: any, propertyName: string, descriptor: PropertyDescriptor) { + const originalMethod = descriptor.value; + + descriptor.value = async function (...args: any[]) { + try { + const keys = options.keyGenerator + ? options.keyGenerator(args, target, propertyName) + : generateDefaultKey(target.constructor.name, propertyName, args); + + const keysArray = Array.isArray(keys) ? 
keys : [keys]; + + if (options.evictBefore) { + // Evict before method execution + for (const key of keysArray) { + await cacheProvider.del(key); + } + logger.debug('Cache evicted before method execution', { + class: target.constructor.name, + method: propertyName, + keys: keysArray + }); + } + + // Execute method + const result = await originalMethod.apply(this, args); + + if (!options.evictBefore) { + // Evict after method execution + for (const key of keysArray) { + await cacheProvider.del(key); + } + logger.debug('Cache evicted after method execution', { + class: target.constructor.name, + method: propertyName, + keys: keysArray + }); + } + + return result; + } catch (error) { + logger.error('Cache evict decorator error', { + class: target.constructor.name, + method: propertyName, + error: error instanceof Error ? error.message : String(error) + }); + + // Continue with original method execution even if eviction fails + return await originalMethod.apply(this, args); + } + }; + }; +} + +/** + * Cache warming decorator - pre-populate cache with method results + */ +export function CacheWarm( + cacheProvider: CacheProvider, + options: { + keyGenerator?: (args: any[], target?: any, methodName?: string) => string; + ttl?: number; + warmupArgs: any[][]; // Array of argument arrays to warm up + } +) { + return function (target: any, propertyName: string, descriptor: PropertyDescriptor) { + const originalMethod = descriptor.value; + + // Warmup cache when method is first accessed + let warmed = false; + + descriptor.value = async function (...args: any[]) { + // Perform warmup if not done yet + if (!warmed) { + warmed = true; + setImmediate(async () => { + try { + for (const warmupArgs of options.warmupArgs) { + const key = options.keyGenerator + ? 
options.keyGenerator(warmupArgs, target, propertyName) + : generateDefaultKey(target.constructor.name, propertyName, warmupArgs); + + // Check if already cached + const exists = await cacheProvider.exists(key); + if (!exists) { + const result = await originalMethod.apply(this, warmupArgs); + await cacheProvider.set(key, result, options.ttl); + } + } + logger.info('Cache warmed up', { + class: target.constructor.name, + method: propertyName, + count: options.warmupArgs.length + }); + } catch (error) { + logger.error('Cache warmup failed', { + class: target.constructor.name, + method: propertyName, + error + }); + } + }); + } + + // Execute normal cacheable logic + const key = options.keyGenerator + ? options.keyGenerator(args, target, propertyName) + : generateDefaultKey(target.constructor.name, propertyName, args); + + const cached = await cacheProvider.get(key); + if (cached !== null) { + return cached; + } + + const result = await originalMethod.apply(this, args); + await cacheProvider.set(key, result, options.ttl); + + return result; + }; + }; +} + +/** + * Trading-specific decorators + */ + +/** + * Cache market data with appropriate TTL + */ +export function CacheMarketData( + cacheProvider: CacheProvider, + ttl: number = 300 // 5 minutes default +) { + return Cacheable(cacheProvider, { + keyGenerator: (args) => { + const [symbol, timeframe, date] = args; + return CacheKeyGenerator.marketData(symbol, timeframe, date); + }, + ttl + }); +} + +/** + * Cache technical indicators + */ +export function CacheIndicator( + cacheProvider: CacheProvider, + ttl: number = 600 // 10 minutes default +) { + return Cacheable(cacheProvider, { + keyGenerator: (args) => { + const [symbol, indicator, period, data] = args; + const dataHash = hashArray(data); + return CacheKeyGenerator.indicator(symbol, indicator, period, dataHash); + }, + ttl + }); +} + +/** + * Cache strategy results + */ +export function CacheStrategy( + cacheProvider: CacheProvider, + ttl: number = 1800 // 30 
minutes default +) { + return Cacheable(cacheProvider, { + keyGenerator: (args) => { + const [strategyName, symbol, timeframe] = args; + return CacheKeyGenerator.strategy(strategyName, symbol, timeframe); + }, + ttl + }); +} + +/** + * Helper functions + */ +function generateDefaultKey(className: string, methodName: string, args: any[]): string { + const argsHash = hashArray(args); + return `method:${className}:${methodName}:${argsHash}`; +} + +function hashArray(arr: any[]): string { + const str = JSON.stringify(arr); + let hash = 0; + for (let i = 0; i < str.length; i++) { + const char = str.charCodeAt(i); + hash = ((hash << 5) - hash) + char; + hash = hash & hash; // Convert to 32-bit integer + } + return Math.abs(hash).toString(36); +} diff --git a/libs/cache/src/index.ts b/libs/cache/src/index.ts index 6e48be3..d33c219 100644 --- a/libs/cache/src/index.ts +++ b/libs/cache/src/index.ts @@ -1,118 +1,118 @@ -import { dragonflyConfig } from '@stock-bot/config'; -import { RedisCache } from './providers/redis-cache'; -import { MemoryCache } from './providers/memory-cache'; -import { HybridCache } from './providers/hybrid-cache'; -import type { CacheProvider, CacheOptions, CacheConfig } from './types'; - -/** - * Factory for creating cache providers with smart defaults - * - * @param type 'redis' | 'memory' | 'hybrid' | 'auto' - * @param options configuration for the cache - */ -export function createCache( - type: 'redis' | 'memory' | 'hybrid' | 'auto' = 'auto', - options: CacheOptions = {} -): CacheProvider { - // Auto-detect best cache type based on environment - if (type === 'auto') { - try { - // Try to use hybrid cache if Redis/Dragonfly is configured - if (dragonflyConfig.DRAGONFLY_HOST) { - type = 'hybrid'; - } else { - type = 'memory'; - } - } catch { - // Fallback to memory if config is not available - type = 'memory'; - } - } - - switch (type) { - case 'redis': - return new RedisCache(options); - case 'memory': - return new MemoryCache(options); - case 
'hybrid': - return new HybridCache(options); - default: - throw new Error(`Unknown cache type: ${type}`); - } -} - -/** - * Create a cache instance with trading-optimized defaults - */ -export function createTradingCache(options: Partial = {}): CacheProvider { - const defaultOptions: CacheOptions = { - keyPrefix: 'trading:', - ttl: 3600, // 1 hour default - memoryTTL: 300, // 5 minutes for memory cache - maxMemoryItems: 2000, // More items for trading data - enableMetrics: true, - ...options - }; - - return createCache('auto', defaultOptions); -} - -/** - * Create a cache for market data with appropriate settings - */ -export function createMarketDataCache(options: Partial = {}): CacheProvider { - const defaultOptions: CacheOptions = { - keyPrefix: 'market:', - ttl: 300, // 5 minutes for market data - memoryTTL: 60, // 1 minute in memory - maxMemoryItems: 5000, // Lots of market data - enableMetrics: true, - ...options - }; - - return createCache('auto', defaultOptions); -} - -/** - * Create a cache for indicators with longer TTL - */ -export function createIndicatorCache(options: Partial = {}): CacheProvider { - const defaultOptions: CacheOptions = { - keyPrefix: 'indicators:', - ttl: 1800, // 30 minutes for indicators - memoryTTL: 600, // 10 minutes in memory - maxMemoryItems: 1000, - enableMetrics: true, - ...options - }; - - return createCache('auto', defaultOptions); -} - -// Export types and classes -export type { - CacheProvider, - CacheOptions, - CacheConfig, - CacheStats, - CacheKey, - SerializationOptions -} from './types'; - -export { RedisCache } from './providers/redis-cache'; -export { MemoryCache } from './providers/memory-cache'; -export { HybridCache } from './providers/hybrid-cache'; - -export { CacheKeyGenerator } from './key-generator'; - -export { - Cacheable, - CacheEvict, - CacheWarm, - CacheMarketData, - CacheIndicator, - CacheStrategy -} from './decorators/cacheable'; - -// Default export for convenience +import { dragonflyConfig } from 
'@stock-bot/config'; +import { RedisCache } from './providers/redis-cache'; +import { MemoryCache } from './providers/memory-cache'; +import { HybridCache } from './providers/hybrid-cache'; +import type { CacheProvider, CacheOptions, CacheConfig } from './types'; + +/** + * Factory for creating cache providers with smart defaults + * + * @param type 'redis' | 'memory' | 'hybrid' | 'auto' + * @param options configuration for the cache + */ +export function createCache( + type: 'redis' | 'memory' | 'hybrid' | 'auto' = 'auto', + options: CacheOptions = {} +): CacheProvider { + // Auto-detect best cache type based on environment + if (type === 'auto') { + try { + // Try to use hybrid cache if Redis/Dragonfly is configured + if (dragonflyConfig.DRAGONFLY_HOST) { + type = 'hybrid'; + } else { + type = 'memory'; + } + } catch { + // Fallback to memory if config is not available + type = 'memory'; + } + } + + switch (type) { + case 'redis': + return new RedisCache(options); + case 'memory': + return new MemoryCache(options); + case 'hybrid': + return new HybridCache(options); + default: + throw new Error(`Unknown cache type: ${type}`); + } +} + +/** + * Create a cache instance with trading-optimized defaults + */ +export function createTradingCache(options: Partial = {}): CacheProvider { + const defaultOptions: CacheOptions = { + keyPrefix: 'trading:', + ttl: 3600, // 1 hour default + memoryTTL: 300, // 5 minutes for memory cache + maxMemoryItems: 2000, // More items for trading data + enableMetrics: true, + ...options + }; + + return createCache('auto', defaultOptions); +} + +/** + * Create a cache for market data with appropriate settings + */ +export function createMarketDataCache(options: Partial = {}): CacheProvider { + const defaultOptions: CacheOptions = { + keyPrefix: 'market:', + ttl: 300, // 5 minutes for market data + memoryTTL: 60, // 1 minute in memory + maxMemoryItems: 5000, // Lots of market data + enableMetrics: true, + ...options + }; + + return 
createCache('auto', defaultOptions); +} + +/** + * Create a cache for indicators with longer TTL + */ +export function createIndicatorCache(options: Partial = {}): CacheProvider { + const defaultOptions: CacheOptions = { + keyPrefix: 'indicators:', + ttl: 1800, // 30 minutes for indicators + memoryTTL: 600, // 10 minutes in memory + maxMemoryItems: 1000, + enableMetrics: true, + ...options + }; + + return createCache('auto', defaultOptions); +} + +// Export types and classes +export type { + CacheProvider, + CacheOptions, + CacheConfig, + CacheStats, + CacheKey, + SerializationOptions +} from './types'; + +export { RedisCache } from './providers/redis-cache'; +export { MemoryCache } from './providers/memory-cache'; +export { HybridCache } from './providers/hybrid-cache'; + +export { CacheKeyGenerator } from './key-generator'; + +export { + Cacheable, + CacheEvict, + CacheWarm, + CacheMarketData, + CacheIndicator, + CacheStrategy +} from './decorators/cacheable'; + +// Default export for convenience export default createCache; \ No newline at end of file diff --git a/libs/cache/src/key-generator.ts b/libs/cache/src/key-generator.ts index 2c16cbe..a6af7e1 100644 --- a/libs/cache/src/key-generator.ts +++ b/libs/cache/src/key-generator.ts @@ -1,73 +1,73 @@ -export class CacheKeyGenerator { - /** - * Generate cache key for market data - */ - static marketData(symbol: string, timeframe: string, date?: Date): string { - const dateStr = date ? 
date.toISOString().split('T')[0] : 'latest'; - return `market:${symbol.toLowerCase()}:${timeframe}:${dateStr}`; - } - - /** - * Generate cache key for technical indicators - */ - static indicator(symbol: string, indicator: string, period: number, dataHash: string): string { - return `indicator:${symbol.toLowerCase()}:${indicator}:${period}:${dataHash}`; - } - - /** - * Generate cache key for backtest results - */ - static backtest(strategyName: string, params: Record): string { - const paramHash = this.hashObject(params); - return `backtest:${strategyName}:${paramHash}`; - } - - /** - * Generate cache key for strategy results - */ - static strategy(strategyName: string, symbol: string, timeframe: string): string { - return `strategy:${strategyName}:${symbol.toLowerCase()}:${timeframe}`; - } - - /** - * Generate cache key for user sessions - */ - static userSession(userId: string): string { - return `session:${userId}`; - } - - /** - * Generate cache key for portfolio data - */ - static portfolio(userId: string, portfolioId: string): string { - return `portfolio:${userId}:${portfolioId}`; - } - - /** - * Generate cache key for real-time prices - */ - static realtimePrice(symbol: string): string { - return `price:realtime:${symbol.toLowerCase()}`; - } - - /** - * Generate cache key for order book data - */ - static orderBook(symbol: string, depth: number = 10): string { - return `orderbook:${symbol.toLowerCase()}:${depth}`; - } - - /** - * Create a simple hash from object for cache keys - */ - private static hashObject(obj: Record): string { - const str = JSON.stringify(obj, Object.keys(obj).sort()); - let hash = 0; - for (let i = 0; i < str.length; i++) { - const char = str.charCodeAt(i); - hash = ((hash << 5) - hash) + char; - hash = hash & hash; // Convert to 32-bit integer - } - return Math.abs(hash).toString(36); - } -} +export class CacheKeyGenerator { + /** + * Generate cache key for market data + */ + static marketData(symbol: string, timeframe: string, 
date?: Date): string { + const dateStr = date ? date.toISOString().split('T')[0] : 'latest'; + return `market:${symbol.toLowerCase()}:${timeframe}:${dateStr}`; + } + + /** + * Generate cache key for technical indicators + */ + static indicator(symbol: string, indicator: string, period: number, dataHash: string): string { + return `indicator:${symbol.toLowerCase()}:${indicator}:${period}:${dataHash}`; + } + + /** + * Generate cache key for backtest results + */ + static backtest(strategyName: string, params: Record): string { + const paramHash = this.hashObject(params); + return `backtest:${strategyName}:${paramHash}`; + } + + /** + * Generate cache key for strategy results + */ + static strategy(strategyName: string, symbol: string, timeframe: string): string { + return `strategy:${strategyName}:${symbol.toLowerCase()}:${timeframe}`; + } + + /** + * Generate cache key for user sessions + */ + static userSession(userId: string): string { + return `session:${userId}`; + } + + /** + * Generate cache key for portfolio data + */ + static portfolio(userId: string, portfolioId: string): string { + return `portfolio:${userId}:${portfolioId}`; + } + + /** + * Generate cache key for real-time prices + */ + static realtimePrice(symbol: string): string { + return `price:realtime:${symbol.toLowerCase()}`; + } + + /** + * Generate cache key for order book data + */ + static orderBook(symbol: string, depth: number = 10): string { + return `orderbook:${symbol.toLowerCase()}:${depth}`; + } + + /** + * Create a simple hash from object for cache keys + */ + private static hashObject(obj: Record): string { + const str = JSON.stringify(obj, Object.keys(obj).sort()); + let hash = 0; + for (let i = 0; i < str.length; i++) { + const char = str.charCodeAt(i); + hash = ((hash << 5) - hash) + char; + hash = hash & hash; // Convert to 32-bit integer + } + return Math.abs(hash).toString(36); + } +} diff --git a/libs/cache/src/providers/hybrid-cache.ts b/libs/cache/src/providers/hybrid-cache.ts 
index b3bc37f..f3e351f 100644 --- a/libs/cache/src/providers/hybrid-cache.ts +++ b/libs/cache/src/providers/hybrid-cache.ts @@ -1,261 +1,261 @@ -import { getLogger } from '@stock-bot/logger'; -import { CacheProvider, CacheOptions, CacheStats } from '../types'; -import { RedisCache } from './redis-cache'; -import { MemoryCache } from './memory-cache'; - -/** - * Hybrid cache provider that uses memory as L1 cache and Redis as L2 cache - * Provides the best of both worlds: fast memory access and persistent Redis storage - */ -export class HybridCache implements CacheProvider { - private memoryCache: MemoryCache; - private redisCache: RedisCache; - private logger = getLogger('hybrid-cache'); - private enableMetrics: boolean; - private startTime = Date.now(); - - private stats: CacheStats = { - hits: 0, - misses: 0, - errors: 0, - hitRate: 0, - total: 0, - uptime: 0 - }; - - constructor(options: CacheOptions = {}) { - this.enableMetrics = options.enableMetrics ?? true; - - // Create L1 (memory) cache with shorter TTL - this.memoryCache = new MemoryCache({ - ...options, - ttl: options.memoryTTL ?? 300, // 5 minutes for memory - maxMemoryItems: options.maxMemoryItems ?? 1000, - enableMetrics: false // We'll handle metrics at hybrid level - }); - - // Create L2 (Redis) cache with longer TTL - this.redisCache = new RedisCache({ - ...options, - enableMetrics: false // We'll handle metrics at hybrid level - }); - - this.logger.info('Hybrid cache initialized', { - memoryTTL: options.memoryTTL ?? 300, - redisTTL: options.ttl ?? 3600, - maxMemoryItems: options.maxMemoryItems ?? 1000 - }); - } - - private updateStats(hit: boolean, error = false): void { - if (!this.enableMetrics) return; - - if (error) { - this.stats.errors++; - } else if (hit) { - this.stats.hits++; - } else { - this.stats.misses++; - } - - this.stats.total = this.stats.hits + this.stats.misses; - this.stats.hitRate = this.stats.total > 0 ? 
this.stats.hits / this.stats.total : 0; - this.stats.uptime = Date.now() - this.startTime; - } - - async get(key: string): Promise { - try { - // Try L1 cache first (memory) - const memoryValue = await this.memoryCache.get(key); - if (memoryValue !== null) { - this.updateStats(true); - this.logger.debug('L1 cache hit', { key, hitRate: this.stats.hitRate }); - return memoryValue; - } - - // Try L2 cache (Redis) - const redisValue = await this.redisCache.get(key); - if (redisValue !== null) { - // Populate L1 cache for next access - await this.memoryCache.set(key, redisValue); - this.updateStats(true); - this.logger.debug('L2 cache hit, populating L1', { key, hitRate: this.stats.hitRate }); - return redisValue; - } - - // Complete miss - this.updateStats(false); - this.logger.debug('Cache miss (both L1 and L2)', { key }); - return null; - - } catch (error) { - this.updateStats(false, true); - this.logger.error('Hybrid cache get error', { - key, - error: error instanceof Error ? error.message : String(error) - }); - return null; - } - } - - async set(key: string, value: T, ttl?: number): Promise { - try { - // Set in both caches - const memoryPromise = this.memoryCache.set(key, value, Math.min(ttl ?? 300, 300)); - const redisPromise = this.redisCache.set(key, value, ttl); - - await Promise.allSettled([memoryPromise, redisPromise]); - this.logger.debug('Cache set (both L1 and L2)', { key, ttl }); - - } catch (error) { - this.updateStats(false, true); - this.logger.error('Hybrid cache set error', { - key, - error: error instanceof Error ? 
error.message : String(error) - }); - } - } - - async del(key: string): Promise { - try { - // Delete from both caches - const memoryPromise = this.memoryCache.del(key); - const redisPromise = this.redisCache.del(key); - - await Promise.allSettled([memoryPromise, redisPromise]); - this.logger.debug('Cache delete (both L1 and L2)', { key }); - - } catch (error) { - this.updateStats(false, true); - this.logger.error('Hybrid cache delete error', { - key, - error: error instanceof Error ? error.message : String(error) - }); - } - } - - async exists(key: string): Promise { - try { - // Check memory first, then Redis - const memoryExists = await this.memoryCache.exists(key); - if (memoryExists) return true; - - return await this.redisCache.exists(key); - - } catch (error) { - this.updateStats(false, true); - this.logger.error('Hybrid cache exists error', { - key, - error: error instanceof Error ? error.message : String(error) - }); - return false; - } - } - - async clear(): Promise { - try { - // Clear both caches - const memoryPromise = this.memoryCache.clear(); - const redisPromise = this.redisCache.clear(); - - await Promise.allSettled([memoryPromise, redisPromise]); - this.logger.info('Cache cleared (both L1 and L2)'); - - } catch (error) { - this.updateStats(false, true); - this.logger.error('Hybrid cache clear error', { - error: error instanceof Error ? 
error.message : String(error) - }); - } - } - - async health(): Promise { - try { - const memoryHealthy = await this.memoryCache.health(); - const redisHealthy = await this.redisCache.health(); - - // Hybrid cache is healthy if at least one cache is working - const isHealthy = memoryHealthy || redisHealthy; - - this.logger.debug('Hybrid cache health check', { - memory: memoryHealthy, - redis: redisHealthy, - overall: isHealthy - }); - - return isHealthy; - } catch (error) { - this.logger.error('Hybrid cache health check failed', error); - return false; - } - } - - getStats(): CacheStats { - return { - ...this.stats, - uptime: Date.now() - this.startTime - }; - } - - /** - * Get detailed stats for both cache layers - */ - getDetailedStats() { - return { - hybrid: this.getStats(), - memory: this.memoryCache.getStats(), - redis: this.redisCache.getStats() - }; - } - - /** - * Warm up the memory cache with frequently accessed keys from Redis - */ - async warmupMemoryCache(keys: string[]): Promise { - this.logger.info('Starting memory cache warmup', { keyCount: keys.length }); - - let warmed = 0; - for (const key of keys) { - try { - const value = await this.redisCache.get(key); - if (value !== null) { - await this.memoryCache.set(key, value); - warmed++; - } - } catch (error) { - this.logger.warn('Failed to warm up key', { key, error }); - } - } - - this.logger.info('Memory cache warmup completed', { - requested: keys.length, - warmed - }); - } - - /** - * Sync memory cache with Redis for specific keys - */ - async syncCaches(keys: string[]): Promise { - for (const key of keys) { - try { - const redisValue = await this.redisCache.get(key); - if (redisValue !== null) { - await this.memoryCache.set(key, redisValue); - } else { - await this.memoryCache.del(key); - } - } catch (error) { - this.logger.warn('Failed to sync key', { key, error }); - } - } - } - - /** - * Close connections for both caches - */ - async disconnect(): Promise { - await 
this.redisCache.disconnect(); - this.logger.info('Hybrid cache disconnected'); - } -} +import { getLogger } from '@stock-bot/logger'; +import { CacheProvider, CacheOptions, CacheStats } from '../types'; +import { RedisCache } from './redis-cache'; +import { MemoryCache } from './memory-cache'; + +/** + * Hybrid cache provider that uses memory as L1 cache and Redis as L2 cache + * Provides the best of both worlds: fast memory access and persistent Redis storage + */ +export class HybridCache implements CacheProvider { + private memoryCache: MemoryCache; + private redisCache: RedisCache; + private logger = getLogger('hybrid-cache'); + private enableMetrics: boolean; + private startTime = Date.now(); + + private stats: CacheStats = { + hits: 0, + misses: 0, + errors: 0, + hitRate: 0, + total: 0, + uptime: 0 + }; + + constructor(options: CacheOptions = {}) { + this.enableMetrics = options.enableMetrics ?? true; + + // Create L1 (memory) cache with shorter TTL + this.memoryCache = new MemoryCache({ + ...options, + ttl: options.memoryTTL ?? 300, // 5 minutes for memory + maxMemoryItems: options.maxMemoryItems ?? 1000, + enableMetrics: false // We'll handle metrics at hybrid level + }); + + // Create L2 (Redis) cache with longer TTL + this.redisCache = new RedisCache({ + ...options, + enableMetrics: false // We'll handle metrics at hybrid level + }); + + this.logger.info('Hybrid cache initialized', { + memoryTTL: options.memoryTTL ?? 300, + redisTTL: options.ttl ?? 3600, + maxMemoryItems: options.maxMemoryItems ?? 1000 + }); + } + + private updateStats(hit: boolean, error = false): void { + if (!this.enableMetrics) return; + + if (error) { + this.stats.errors++; + } else if (hit) { + this.stats.hits++; + } else { + this.stats.misses++; + } + + this.stats.total = this.stats.hits + this.stats.misses; + this.stats.hitRate = this.stats.total > 0 ? 
this.stats.hits / this.stats.total : 0; + this.stats.uptime = Date.now() - this.startTime; + } + + async get(key: string): Promise { + try { + // Try L1 cache first (memory) + const memoryValue = await this.memoryCache.get(key); + if (memoryValue !== null) { + this.updateStats(true); + this.logger.debug('L1 cache hit', { key, hitRate: this.stats.hitRate }); + return memoryValue; + } + + // Try L2 cache (Redis) + const redisValue = await this.redisCache.get(key); + if (redisValue !== null) { + // Populate L1 cache for next access + await this.memoryCache.set(key, redisValue); + this.updateStats(true); + this.logger.debug('L2 cache hit, populating L1', { key, hitRate: this.stats.hitRate }); + return redisValue; + } + + // Complete miss + this.updateStats(false); + this.logger.debug('Cache miss (both L1 and L2)', { key }); + return null; + + } catch (error) { + this.updateStats(false, true); + this.logger.error('Hybrid cache get error', { + key, + error: error instanceof Error ? error.message : String(error) + }); + return null; + } + } + + async set(key: string, value: T, ttl?: number): Promise { + try { + // Set in both caches + const memoryPromise = this.memoryCache.set(key, value, Math.min(ttl ?? 300, 300)); + const redisPromise = this.redisCache.set(key, value, ttl); + + await Promise.allSettled([memoryPromise, redisPromise]); + this.logger.debug('Cache set (both L1 and L2)', { key, ttl }); + + } catch (error) { + this.updateStats(false, true); + this.logger.error('Hybrid cache set error', { + key, + error: error instanceof Error ? 
error.message : String(error) + }); + } + } + + async del(key: string): Promise { + try { + // Delete from both caches + const memoryPromise = this.memoryCache.del(key); + const redisPromise = this.redisCache.del(key); + + await Promise.allSettled([memoryPromise, redisPromise]); + this.logger.debug('Cache delete (both L1 and L2)', { key }); + + } catch (error) { + this.updateStats(false, true); + this.logger.error('Hybrid cache delete error', { + key, + error: error instanceof Error ? error.message : String(error) + }); + } + } + + async exists(key: string): Promise { + try { + // Check memory first, then Redis + const memoryExists = await this.memoryCache.exists(key); + if (memoryExists) return true; + + return await this.redisCache.exists(key); + + } catch (error) { + this.updateStats(false, true); + this.logger.error('Hybrid cache exists error', { + key, + error: error instanceof Error ? error.message : String(error) + }); + return false; + } + } + + async clear(): Promise { + try { + // Clear both caches + const memoryPromise = this.memoryCache.clear(); + const redisPromise = this.redisCache.clear(); + + await Promise.allSettled([memoryPromise, redisPromise]); + this.logger.info('Cache cleared (both L1 and L2)'); + + } catch (error) { + this.updateStats(false, true); + this.logger.error('Hybrid cache clear error', { + error: error instanceof Error ? 
error.message : String(error) + }); + } + } + + async health(): Promise { + try { + const memoryHealthy = await this.memoryCache.health(); + const redisHealthy = await this.redisCache.health(); + + // Hybrid cache is healthy if at least one cache is working + const isHealthy = memoryHealthy || redisHealthy; + + this.logger.debug('Hybrid cache health check', { + memory: memoryHealthy, + redis: redisHealthy, + overall: isHealthy + }); + + return isHealthy; + } catch (error) { + this.logger.error('Hybrid cache health check failed', error); + return false; + } + } + + getStats(): CacheStats { + return { + ...this.stats, + uptime: Date.now() - this.startTime + }; + } + + /** + * Get detailed stats for both cache layers + */ + getDetailedStats() { + return { + hybrid: this.getStats(), + memory: this.memoryCache.getStats(), + redis: this.redisCache.getStats() + }; + } + + /** + * Warm up the memory cache with frequently accessed keys from Redis + */ + async warmupMemoryCache(keys: string[]): Promise { + this.logger.info('Starting memory cache warmup', { keyCount: keys.length }); + + let warmed = 0; + for (const key of keys) { + try { + const value = await this.redisCache.get(key); + if (value !== null) { + await this.memoryCache.set(key, value); + warmed++; + } + } catch (error) { + this.logger.warn('Failed to warm up key', { key, error }); + } + } + + this.logger.info('Memory cache warmup completed', { + requested: keys.length, + warmed + }); + } + + /** + * Sync memory cache with Redis for specific keys + */ + async syncCaches(keys: string[]): Promise { + for (const key of keys) { + try { + const redisValue = await this.redisCache.get(key); + if (redisValue !== null) { + await this.memoryCache.set(key, redisValue); + } else { + await this.memoryCache.del(key); + } + } catch (error) { + this.logger.warn('Failed to sync key', { key, error }); + } + } + } + + /** + * Close connections for both caches + */ + async disconnect(): Promise { + await 
this.redisCache.disconnect(); + this.logger.info('Hybrid cache disconnected'); + } +} diff --git a/libs/cache/src/providers/memory-cache.ts b/libs/cache/src/providers/memory-cache.ts index 19f7e70..28e740c 100644 --- a/libs/cache/src/providers/memory-cache.ts +++ b/libs/cache/src/providers/memory-cache.ts @@ -1,259 +1,259 @@ -import { getLogger } from '@stock-bot/logger'; -import { CacheProvider, CacheOptions, CacheStats } from '../types'; - -interface CacheEntry { - value: T; - expiry: number; - accessed: number; -} - -/** - * In-memory cache provider with LRU eviction and comprehensive metrics - */ -export class MemoryCache implements CacheProvider { - private store = new Map>(); - private logger = getLogger('memory-cache'); - private defaultTTL: number; - private keyPrefix: string; - private maxItems: number; - private enableMetrics: boolean; - private startTime = Date.now(); - - private stats: CacheStats = { - hits: 0, - misses: 0, - errors: 0, - hitRate: 0, - total: 0, - uptime: 0 - }; - - constructor(options: CacheOptions = {}) { - this.defaultTTL = options.ttl ?? 3600; // 1 hour default - this.keyPrefix = options.keyPrefix ?? 'cache:'; - this.maxItems = options.maxMemoryItems ?? 1000; - this.enableMetrics = options.enableMetrics ?? true; - - this.logger.info('Memory cache initialized', { - maxItems: this.maxItems, - defaultTTL: this.defaultTTL, - enableMetrics: this.enableMetrics - }); - - // Cleanup expired entries every 5 minutes - setInterval(() => this.cleanup(), 5 * 60 * 1000); - } - - private getKey(key: string): string { - return `${this.keyPrefix}${key}`; - } - - private updateStats(hit: boolean, error = false): void { - if (!this.enableMetrics) return; - - if (error) { - this.stats.errors++; - } else if (hit) { - this.stats.hits++; - } else { - this.stats.misses++; - } - - this.stats.total = this.stats.hits + this.stats.misses; - this.stats.hitRate = this.stats.total > 0 ? 
this.stats.hits / this.stats.total : 0; - this.stats.uptime = Date.now() - this.startTime; - } - - private cleanup(): void { - const now = Date.now(); - let cleaned = 0; - - for (const [key, entry] of this.store.entries()) { - if (entry.expiry < now) { - this.store.delete(key); - cleaned++; - } - } - - if (cleaned > 0) { - this.logger.debug('Cleaned expired entries', { - cleaned, - remaining: this.store.size - }); - } - } - - private evictLRU(): void { - if (this.store.size <= this.maxItems) return; - - // Find least recently accessed item - let oldestKey = ''; - let oldestAccess = Date.now(); - - for (const [key, entry] of this.store.entries()) { - if (entry.accessed < oldestAccess) { - oldestAccess = entry.accessed; - oldestKey = key; - } - } - - if (oldestKey) { - this.store.delete(oldestKey); - this.logger.debug('Evicted LRU entry', { key: oldestKey }); - } - } - - async get(key: string): Promise { - try { - const fullKey = this.getKey(key); - const entry = this.store.get(fullKey); - - if (!entry) { - this.updateStats(false); - this.logger.debug('Cache miss', { key }); - return null; - } - - const now = Date.now(); - if (entry.expiry < now) { - this.store.delete(fullKey); - this.updateStats(false); - this.logger.debug('Cache miss (expired)', { key }); - return null; - } - - // Update access time for LRU - entry.accessed = now; - this.updateStats(true); - this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate }); - - return entry.value; - } catch (error) { - this.updateStats(false, true); - this.logger.error('Cache get error', { - key, - error: error instanceof Error ? error.message : String(error) - }); - return null; - } - } - - async set(key: string, value: T, ttl?: number): Promise { - try { - const fullKey = this.getKey(key); - const now = Date.now(); - const expiry = now + 1000 * (ttl ?? 
this.defaultTTL); - - // Evict if necessary - this.evictLRU(); - - this.store.set(fullKey, { - value, - expiry, - accessed: now - }); - - this.logger.debug('Cache set', { key, ttl: ttl ?? this.defaultTTL }); - } catch (error) { - this.updateStats(false, true); - this.logger.error('Cache set error', { - key, - error: error instanceof Error ? error.message : String(error) - }); - } - } - - async del(key: string): Promise { - try { - const fullKey = this.getKey(key); - const deleted = this.store.delete(fullKey); - this.logger.debug('Cache delete', { key, deleted }); - } catch (error) { - this.updateStats(false, true); - this.logger.error('Cache delete error', { - key, - error: error instanceof Error ? error.message : String(error) - }); - } - } - - async exists(key: string): Promise { - try { - const fullKey = this.getKey(key); - const entry = this.store.get(fullKey); - - if (!entry) return false; - - // Check if expired - if (entry.expiry < Date.now()) { - this.store.delete(fullKey); - return false; - } - - return true; - } catch (error) { - this.updateStats(false, true); - this.logger.error('Cache exists error', { - key, - error: error instanceof Error ? error.message : String(error) - }); - return false; - } - } - - async clear(): Promise { - try { - const size = this.store.size; - this.store.clear(); - this.logger.info('Cache cleared', { entriesDeleted: size }); - } catch (error) { - this.updateStats(false, true); - this.logger.error('Cache clear error', { - error: error instanceof Error ? 
error.message : String(error) - }); - } - } - - async health(): Promise { - try { - // Simple health check - try to set and get a test value - await this.set('__health_check__', 'ok', 1); - const result = await this.get('__health_check__'); - await this.del('__health_check__'); - return result === 'ok'; - } catch (error) { - this.logger.error('Memory cache health check failed', error); - return false; - } - } - - getStats(): CacheStats { - return { - ...this.stats, - uptime: Date.now() - this.startTime - }; - } - - /** - * Get additional memory cache specific stats - */ - getMemoryStats() { - return { - ...this.getStats(), - entries: this.store.size, - maxItems: this.maxItems, - memoryUsage: this.estimateMemoryUsage() - }; - } - - private estimateMemoryUsage(): number { - // Rough estimation of memory usage in bytes - let bytes = 0; - for (const [key, entry] of this.store.entries()) { - bytes += key.length * 2; // UTF-16 characters - bytes += JSON.stringify(entry.value).length * 2; - bytes += 24; // Overhead for entry object - } - return bytes; - } -} +import { getLogger } from '@stock-bot/logger'; +import { CacheProvider, CacheOptions, CacheStats } from '../types'; + +interface CacheEntry { + value: T; + expiry: number; + accessed: number; +} + +/** + * In-memory cache provider with LRU eviction and comprehensive metrics + */ +export class MemoryCache implements CacheProvider { + private store = new Map>(); + private logger = getLogger('memory-cache'); + private defaultTTL: number; + private keyPrefix: string; + private maxItems: number; + private enableMetrics: boolean; + private startTime = Date.now(); + + private stats: CacheStats = { + hits: 0, + misses: 0, + errors: 0, + hitRate: 0, + total: 0, + uptime: 0 + }; + + constructor(options: CacheOptions = {}) { + this.defaultTTL = options.ttl ?? 3600; // 1 hour default + this.keyPrefix = options.keyPrefix ?? 'cache:'; + this.maxItems = options.maxMemoryItems ?? 1000; + this.enableMetrics = options.enableMetrics ?? 
true; + + this.logger.info('Memory cache initialized', { + maxItems: this.maxItems, + defaultTTL: this.defaultTTL, + enableMetrics: this.enableMetrics + }); + + // Cleanup expired entries every 5 minutes + setInterval(() => this.cleanup(), 5 * 60 * 1000); + } + + private getKey(key: string): string { + return `${this.keyPrefix}${key}`; + } + + private updateStats(hit: boolean, error = false): void { + if (!this.enableMetrics) return; + + if (error) { + this.stats.errors++; + } else if (hit) { + this.stats.hits++; + } else { + this.stats.misses++; + } + + this.stats.total = this.stats.hits + this.stats.misses; + this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0; + this.stats.uptime = Date.now() - this.startTime; + } + + private cleanup(): void { + const now = Date.now(); + let cleaned = 0; + + for (const [key, entry] of this.store.entries()) { + if (entry.expiry < now) { + this.store.delete(key); + cleaned++; + } + } + + if (cleaned > 0) { + this.logger.debug('Cleaned expired entries', { + cleaned, + remaining: this.store.size + }); + } + } + + private evictLRU(): void { + if (this.store.size <= this.maxItems) return; + + // Find least recently accessed item + let oldestKey = ''; + let oldestAccess = Date.now(); + + for (const [key, entry] of this.store.entries()) { + if (entry.accessed < oldestAccess) { + oldestAccess = entry.accessed; + oldestKey = key; + } + } + + if (oldestKey) { + this.store.delete(oldestKey); + this.logger.debug('Evicted LRU entry', { key: oldestKey }); + } + } + + async get(key: string): Promise { + try { + const fullKey = this.getKey(key); + const entry = this.store.get(fullKey); + + if (!entry) { + this.updateStats(false); + this.logger.debug('Cache miss', { key }); + return null; + } + + const now = Date.now(); + if (entry.expiry < now) { + this.store.delete(fullKey); + this.updateStats(false); + this.logger.debug('Cache miss (expired)', { key }); + return null; + } + + // Update access time for LRU + 
entry.accessed = now; + this.updateStats(true); + this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate }); + + return entry.value; + } catch (error) { + this.updateStats(false, true); + this.logger.error('Cache get error', { + key, + error: error instanceof Error ? error.message : String(error) + }); + return null; + } + } + + async set(key: string, value: T, ttl?: number): Promise { + try { + const fullKey = this.getKey(key); + const now = Date.now(); + const expiry = now + 1000 * (ttl ?? this.defaultTTL); + + // Evict if necessary + this.evictLRU(); + + this.store.set(fullKey, { + value, + expiry, + accessed: now + }); + + this.logger.debug('Cache set', { key, ttl: ttl ?? this.defaultTTL }); + } catch (error) { + this.updateStats(false, true); + this.logger.error('Cache set error', { + key, + error: error instanceof Error ? error.message : String(error) + }); + } + } + + async del(key: string): Promise { + try { + const fullKey = this.getKey(key); + const deleted = this.store.delete(fullKey); + this.logger.debug('Cache delete', { key, deleted }); + } catch (error) { + this.updateStats(false, true); + this.logger.error('Cache delete error', { + key, + error: error instanceof Error ? error.message : String(error) + }); + } + } + + async exists(key: string): Promise { + try { + const fullKey = this.getKey(key); + const entry = this.store.get(fullKey); + + if (!entry) return false; + + // Check if expired + if (entry.expiry < Date.now()) { + this.store.delete(fullKey); + return false; + } + + return true; + } catch (error) { + this.updateStats(false, true); + this.logger.error('Cache exists error', { + key, + error: error instanceof Error ? 
error.message : String(error) + }); + return false; + } + } + + async clear(): Promise { + try { + const size = this.store.size; + this.store.clear(); + this.logger.info('Cache cleared', { entriesDeleted: size }); + } catch (error) { + this.updateStats(false, true); + this.logger.error('Cache clear error', { + error: error instanceof Error ? error.message : String(error) + }); + } + } + + async health(): Promise { + try { + // Simple health check - try to set and get a test value + await this.set('__health_check__', 'ok', 1); + const result = await this.get('__health_check__'); + await this.del('__health_check__'); + return result === 'ok'; + } catch (error) { + this.logger.error('Memory cache health check failed', error); + return false; + } + } + + getStats(): CacheStats { + return { + ...this.stats, + uptime: Date.now() - this.startTime + }; + } + + /** + * Get additional memory cache specific stats + */ + getMemoryStats() { + return { + ...this.getStats(), + entries: this.store.size, + maxItems: this.maxItems, + memoryUsage: this.estimateMemoryUsage() + }; + } + + private estimateMemoryUsage(): number { + // Rough estimation of memory usage in bytes + let bytes = 0; + for (const [key, entry] of this.store.entries()) { + bytes += key.length * 2; // UTF-16 characters + bytes += JSON.stringify(entry.value).length * 2; + bytes += 24; // Overhead for entry object + } + return bytes; + } +} diff --git a/libs/cache/src/providers/redis-cache.ts b/libs/cache/src/providers/redis-cache.ts index 4f2f249..dd86f2d 100644 --- a/libs/cache/src/providers/redis-cache.ts +++ b/libs/cache/src/providers/redis-cache.ts @@ -1,263 +1,263 @@ -import Redis from 'ioredis'; -import { getLogger } from '@stock-bot/logger'; -import { dragonflyConfig } from '@stock-bot/config'; -import { CacheProvider, CacheOptions, CacheStats } from '../types'; - -/** - * Redis-based cache provider with comprehensive error handling and metrics - */ -export class RedisCache implements CacheProvider { - 
private redis: Redis; - private logger = getLogger('redis-cache'); - private defaultTTL: number; - private keyPrefix: string; - private enableMetrics: boolean; - private isConnected = false; - private startTime = Date.now(); - - private stats: CacheStats = { - hits: 0, - misses: 0, - errors: 0, - hitRate: 0, - total: 0, - uptime: 0 - }; - - constructor(options: CacheOptions = {}) { - this.defaultTTL = options.ttl ?? 3600; // 1 hour default - this.keyPrefix = options.keyPrefix ?? 'cache:'; - this.enableMetrics = options.enableMetrics ?? true; - - const redisConfig = { - host: dragonflyConfig.DRAGONFLY_HOST, - port: dragonflyConfig.DRAGONFLY_PORT, - password: dragonflyConfig.DRAGONFLY_PASSWORD || undefined, - username: dragonflyConfig.DRAGONFLY_USERNAME || undefined, - db: dragonflyConfig.DRAGONFLY_DATABASE, - maxRetriesPerRequest: dragonflyConfig.DRAGONFLY_MAX_RETRIES, - retryDelayOnFailover: dragonflyConfig.DRAGONFLY_RETRY_DELAY, - connectTimeout: dragonflyConfig.DRAGONFLY_CONNECT_TIMEOUT, - commandTimeout: dragonflyConfig.DRAGONFLY_COMMAND_TIMEOUT, - keepAlive: dragonflyConfig.DRAGONFLY_ENABLE_KEEPALIVE ? 
dragonflyConfig.DRAGONFLY_KEEPALIVE_INTERVAL * 1000 : 0, - ...(dragonflyConfig.DRAGONFLY_TLS && { - tls: { - cert: dragonflyConfig.DRAGONFLY_TLS_CERT_FILE || undefined, - key: dragonflyConfig.DRAGONFLY_TLS_KEY_FILE || undefined, - ca: dragonflyConfig.DRAGONFLY_TLS_CA_FILE || undefined, - rejectUnauthorized: !dragonflyConfig.DRAGONFLY_TLS_SKIP_VERIFY, - } - }) - }; - - this.redis = new Redis(redisConfig); - this.setupEventHandlers(); - } - - private setupEventHandlers(): void { - this.redis.on('connect', () => { - this.isConnected = true; - this.logger.info('Redis cache connected', { - host: dragonflyConfig.DRAGONFLY_HOST, - port: dragonflyConfig.DRAGONFLY_PORT, - db: dragonflyConfig.DRAGONFLY_DATABASE - }); - }); - - this.redis.on('ready', () => { - this.logger.info('Redis cache ready for commands'); - }); - - this.redis.on('error', (error) => { - this.isConnected = false; - this.stats.errors++; - this.logger.error('Redis cache connection error', { error: error.message }); - }); - - this.redis.on('close', () => { - this.isConnected = false; - this.logger.warn('Redis cache connection closed'); - }); - - this.redis.on('reconnecting', () => { - this.logger.info('Redis cache reconnecting...'); - }); - } - - private getKey(key: string): string { - return `${this.keyPrefix}${key}`; - } - - private updateStats(hit: boolean, error = false): void { - if (!this.enableMetrics) return; - - if (error) { - this.stats.errors++; - } else if (hit) { - this.stats.hits++; - } else { - this.stats.misses++; - } - - this.stats.total = this.stats.hits + this.stats.misses; - this.stats.hitRate = this.stats.total > 0 ? 
this.stats.hits / this.stats.total : 0; - this.stats.uptime = Date.now() - this.startTime; - } - - private async safeExecute( - operation: () => Promise, - fallback: T, - operationName: string - ): Promise { - if (!this.isConnected) { - this.logger.warn(`Redis not connected for ${operationName}, using fallback`); - this.updateStats(false, true); - return fallback; - } - - try { - return await operation(); - } catch (error) { - this.logger.error(`Redis ${operationName} failed`, { - error: error instanceof Error ? error.message : String(error) - }); - this.updateStats(false, true); - return fallback; - } - } - - async get(key: string): Promise { - return this.safeExecute( - async () => { - const fullKey = this.getKey(key); - const value = await this.redis.get(fullKey); - - if (value === null) { - this.updateStats(false); - this.logger.debug('Cache miss', { key }); - return null; - } - - this.updateStats(true); - this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate }); - - try { - return JSON.parse(value) as T; - } catch { - // Return as-is if not valid JSON - return value as unknown as T; - } - }, - null, - 'get' - ); - } - - async set(key: string, value: T, ttl?: number): Promise { - await this.safeExecute( - async () => { - const fullKey = this.getKey(key); - const serialized = typeof value === 'string' ? value : JSON.stringify(value); - const expiry = ttl ?? 
this.defaultTTL; - - await this.redis.setex(fullKey, expiry, serialized); - this.logger.debug('Cache set', { key, ttl: expiry }); - }, - undefined, - 'set' - ); - } - - async del(key: string): Promise { - await this.safeExecute( - async () => { - const fullKey = this.getKey(key); - await this.redis.del(fullKey); - this.logger.debug('Cache delete', { key }); - }, - undefined, - 'del' - ); - } - - async exists(key: string): Promise { - return this.safeExecute( - async () => { - const fullKey = this.getKey(key); - const result = await this.redis.exists(fullKey); - return result === 1; - }, - false, - 'exists' - ); - } - - async clear(): Promise { - await this.safeExecute( - async () => { - const pattern = `${this.keyPrefix}*`; - const keys = await this.redis.keys(pattern); - if (keys.length > 0) { - await this.redis.del(...keys); - this.logger.info('Cache cleared', { keysDeleted: keys.length }); - } - }, - undefined, - 'clear' - ); - } - - async health(): Promise { - try { - const pong = await this.redis.ping(); - return pong === 'PONG' && this.isConnected; - } catch (error) { - this.logger.error('Redis health check failed', error); - return false; - } - } - - getStats(): CacheStats { - return { - ...this.stats, - uptime: Date.now() - this.startTime - }; - } - - /** - * Trading-specific convenience methods - */ - async cacheMarketData(symbol: string, timeframe: string, data: any[], ttl = 300): Promise { - const key = `market:${symbol}:${timeframe}`; - await this.set(key, data, ttl); - } - - async getMarketData(symbol: string, timeframe: string): Promise { - const key = `market:${symbol}:${timeframe}`; - return this.get(key); - } - - async cacheIndicator( - symbol: string, - indicator: string, - period: number, - data: number[], - ttl = 600 - ): Promise { - const key = `indicator:${symbol}:${indicator}:${period}`; - await this.set(key, data, ttl); - } - - async getIndicator(symbol: string, indicator: string, period: number): Promise { - const key = 
`indicator:${symbol}:${indicator}:${period}`; - return this.get(key); - } - - /** - * Close the Redis connection - */ - async disconnect(): Promise { - await this.redis.quit(); - this.logger.info('Redis cache disconnected'); - } -} +import Redis from 'ioredis'; +import { getLogger } from '@stock-bot/logger'; +import { dragonflyConfig } from '@stock-bot/config'; +import { CacheProvider, CacheOptions, CacheStats } from '../types'; + +/** + * Redis-based cache provider with comprehensive error handling and metrics + */ +export class RedisCache implements CacheProvider { + private redis: Redis; + private logger = getLogger('redis-cache'); + private defaultTTL: number; + private keyPrefix: string; + private enableMetrics: boolean; + private isConnected = false; + private startTime = Date.now(); + + private stats: CacheStats = { + hits: 0, + misses: 0, + errors: 0, + hitRate: 0, + total: 0, + uptime: 0 + }; + + constructor(options: CacheOptions = {}) { + this.defaultTTL = options.ttl ?? 3600; // 1 hour default + this.keyPrefix = options.keyPrefix ?? 'cache:'; + this.enableMetrics = options.enableMetrics ?? true; + + const redisConfig = { + host: dragonflyConfig.DRAGONFLY_HOST, + port: dragonflyConfig.DRAGONFLY_PORT, + password: dragonflyConfig.DRAGONFLY_PASSWORD || undefined, + username: dragonflyConfig.DRAGONFLY_USERNAME || undefined, + db: dragonflyConfig.DRAGONFLY_DATABASE, + maxRetriesPerRequest: dragonflyConfig.DRAGONFLY_MAX_RETRIES, + retryDelayOnFailover: dragonflyConfig.DRAGONFLY_RETRY_DELAY, + connectTimeout: dragonflyConfig.DRAGONFLY_CONNECT_TIMEOUT, + commandTimeout: dragonflyConfig.DRAGONFLY_COMMAND_TIMEOUT, + keepAlive: dragonflyConfig.DRAGONFLY_ENABLE_KEEPALIVE ? 
dragonflyConfig.DRAGONFLY_KEEPALIVE_INTERVAL * 1000 : 0, + ...(dragonflyConfig.DRAGONFLY_TLS && { + tls: { + cert: dragonflyConfig.DRAGONFLY_TLS_CERT_FILE || undefined, + key: dragonflyConfig.DRAGONFLY_TLS_KEY_FILE || undefined, + ca: dragonflyConfig.DRAGONFLY_TLS_CA_FILE || undefined, + rejectUnauthorized: !dragonflyConfig.DRAGONFLY_TLS_SKIP_VERIFY, + } + }) + }; + + this.redis = new Redis(redisConfig); + this.setupEventHandlers(); + } + + private setupEventHandlers(): void { + this.redis.on('connect', () => { + this.isConnected = true; + this.logger.info('Redis cache connected', { + host: dragonflyConfig.DRAGONFLY_HOST, + port: dragonflyConfig.DRAGONFLY_PORT, + db: dragonflyConfig.DRAGONFLY_DATABASE + }); + }); + + this.redis.on('ready', () => { + this.logger.info('Redis cache ready for commands'); + }); + + this.redis.on('error', (error) => { + this.isConnected = false; + this.stats.errors++; + this.logger.error('Redis cache connection error', { error: error.message }); + }); + + this.redis.on('close', () => { + this.isConnected = false; + this.logger.warn('Redis cache connection closed'); + }); + + this.redis.on('reconnecting', () => { + this.logger.info('Redis cache reconnecting...'); + }); + } + + private getKey(key: string): string { + return `${this.keyPrefix}${key}`; + } + + private updateStats(hit: boolean, error = false): void { + if (!this.enableMetrics) return; + + if (error) { + this.stats.errors++; + } else if (hit) { + this.stats.hits++; + } else { + this.stats.misses++; + } + + this.stats.total = this.stats.hits + this.stats.misses; + this.stats.hitRate = this.stats.total > 0 ? 
this.stats.hits / this.stats.total : 0; + this.stats.uptime = Date.now() - this.startTime; + } + + private async safeExecute( + operation: () => Promise, + fallback: T, + operationName: string + ): Promise { + if (!this.isConnected) { + this.logger.warn(`Redis not connected for ${operationName}, using fallback`); + this.updateStats(false, true); + return fallback; + } + + try { + return await operation(); + } catch (error) { + this.logger.error(`Redis ${operationName} failed`, { + error: error instanceof Error ? error.message : String(error) + }); + this.updateStats(false, true); + return fallback; + } + } + + async get(key: string): Promise { + return this.safeExecute( + async () => { + const fullKey = this.getKey(key); + const value = await this.redis.get(fullKey); + + if (value === null) { + this.updateStats(false); + this.logger.debug('Cache miss', { key }); + return null; + } + + this.updateStats(true); + this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate }); + + try { + return JSON.parse(value) as T; + } catch { + // Return as-is if not valid JSON + return value as unknown as T; + } + }, + null, + 'get' + ); + } + + async set(key: string, value: T, ttl?: number): Promise { + await this.safeExecute( + async () => { + const fullKey = this.getKey(key); + const serialized = typeof value === 'string' ? value : JSON.stringify(value); + const expiry = ttl ?? 
this.defaultTTL; + + await this.redis.setex(fullKey, expiry, serialized); + this.logger.debug('Cache set', { key, ttl: expiry }); + }, + undefined, + 'set' + ); + } + + async del(key: string): Promise { + await this.safeExecute( + async () => { + const fullKey = this.getKey(key); + await this.redis.del(fullKey); + this.logger.debug('Cache delete', { key }); + }, + undefined, + 'del' + ); + } + + async exists(key: string): Promise { + return this.safeExecute( + async () => { + const fullKey = this.getKey(key); + const result = await this.redis.exists(fullKey); + return result === 1; + }, + false, + 'exists' + ); + } + + async clear(): Promise { + await this.safeExecute( + async () => { + const pattern = `${this.keyPrefix}*`; + const keys = await this.redis.keys(pattern); + if (keys.length > 0) { + await this.redis.del(...keys); + this.logger.info('Cache cleared', { keysDeleted: keys.length }); + } + }, + undefined, + 'clear' + ); + } + + async health(): Promise { + try { + const pong = await this.redis.ping(); + return pong === 'PONG' && this.isConnected; + } catch (error) { + this.logger.error('Redis health check failed', error); + return false; + } + } + + getStats(): CacheStats { + return { + ...this.stats, + uptime: Date.now() - this.startTime + }; + } + + /** + * Trading-specific convenience methods + */ + async cacheMarketData(symbol: string, timeframe: string, data: any[], ttl = 300): Promise { + const key = `market:${symbol}:${timeframe}`; + await this.set(key, data, ttl); + } + + async getMarketData(symbol: string, timeframe: string): Promise { + const key = `market:${symbol}:${timeframe}`; + return this.get(key); + } + + async cacheIndicator( + symbol: string, + indicator: string, + period: number, + data: number[], + ttl = 600 + ): Promise { + const key = `indicator:${symbol}:${indicator}:${period}`; + await this.set(key, data, ttl); + } + + async getIndicator(symbol: string, indicator: string, period: number): Promise { + const key = 
`indicator:${symbol}:${indicator}:${period}`; + return this.get(key); + } + + /** + * Close the Redis connection + */ + async disconnect(): Promise { + await this.redis.quit(); + this.logger.info('Redis cache disconnected'); + } +} diff --git a/libs/cache/src/types.ts b/libs/cache/src/types.ts index 2551578..1f19cce 100644 --- a/libs/cache/src/types.ts +++ b/libs/cache/src/types.ts @@ -1,42 +1,42 @@ -export interface CacheProvider { - get(key: string): Promise; - set(key: string, value: T, ttl?: number): Promise; - del(key: string): Promise; - exists(key: string): Promise; - clear(): Promise; - getStats(): CacheStats; - health(): Promise; -} - -export interface CacheOptions { - ttl?: number; - keyPrefix?: string; - enableMetrics?: boolean; - maxMemoryItems?: number; - memoryTTL?: number; -} - -export interface CacheStats { - hits: number; - misses: number; - errors: number; - hitRate: number; - total: number; - uptime: number; -} - -export interface CacheConfig { - type: 'redis' | 'memory' | 'hybrid'; - keyPrefix?: string; - defaultTTL?: number; - maxMemoryItems?: number; - enableMetrics?: boolean; - compression?: boolean; -} - -export type CacheKey = string | (() => string); - -export interface SerializationOptions { - compress?: boolean; - binary?: boolean; -} +export interface CacheProvider { + get(key: string): Promise; + set(key: string, value: T, ttl?: number): Promise; + del(key: string): Promise; + exists(key: string): Promise; + clear(): Promise; + getStats(): CacheStats; + health(): Promise; +} + +export interface CacheOptions { + ttl?: number; + keyPrefix?: string; + enableMetrics?: boolean; + maxMemoryItems?: number; + memoryTTL?: number; +} + +export interface CacheStats { + hits: number; + misses: number; + errors: number; + hitRate: number; + total: number; + uptime: number; +} + +export interface CacheConfig { + type: 'redis' | 'memory' | 'hybrid'; + keyPrefix?: string; + defaultTTL?: number; + maxMemoryItems?: number; + enableMetrics?: boolean; + 
compression?: boolean; +} + +export type CacheKey = string | (() => string); + +export interface SerializationOptions { + compress?: boolean; + binary?: boolean; +} diff --git a/libs/cache/tsconfig.json b/libs/cache/tsconfig.json index 3030b42..e8f78e0 100644 --- a/libs/cache/tsconfig.json +++ b/libs/cache/tsconfig.json @@ -1,13 +1,13 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "references": [ - { "path": "../types" }, - { "path": "../config" }, - { "path": "../logger" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../config" }, + { "path": "../logger" } + ] +} diff --git a/libs/cache/turbo.json b/libs/cache/turbo.json index 7632db9..c630cca 100644 --- a/libs/cache/turbo.json +++ b/libs/cache/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/config/README.md b/libs/config/README.md index d6551e3..c39802b 100644 --- a/libs/config/README.md +++ b/libs/config/README.md @@ -1,103 +1,103 @@ -# @stock-bot/config - -A configuration management library for the Stock Bot trading platform. - -## Overview - -This library provides a centralized way to manage configurations across all Stock Bot microservices and components. 
It includes: - -- Environment-based configuration loading -- Strong TypeScript typing and validation using Zod -- Default configurations for services -- Environment variable parsing helpers -- Service-specific configuration modules - -## Usage - -### Basic Usage - -```typescript -import { databaseConfig, dataProviderConfigs, riskConfig } from '@stock-bot/config'; - -// Access database configuration -const dragonflyHost = databaseConfig.dragonfly.host; - -// Access data provider configuration -const alpacaApiKey = dataProviderConfigs.providers.find(p => p.name === 'alpaca')?.apiKey; - -// Access risk configuration -const maxPositionSize = riskConfig.maxPositionSize; -``` - -### Service-Specific Configuration - -```typescript -import { marketDataGatewayConfig, riskGuardianConfig } from '@stock-bot/config'; - -// Access Market Data Gateway configuration -const websocketPath = marketDataGatewayConfig.websocket.path; - -// Access Risk Guardian configuration -const preTradeValidation = riskGuardianConfig.riskChecks.preTradeValidation; -``` - -### Environment Variables - -The library automatically loads environment variables from `.env` files. 
You can create environment-specific files: - -- `.env` - Base environment variables -- `.env.development` - Development-specific variables -- `.env.production` - Production-specific variables -- `.env.local` - Local overrides (not to be committed to git) - -## Configuration Modules - -### Core Configuration - -- `Environment` - Enum for different environments -- `loadEnvVariables()` - Load environment variables from .env files -- `getEnvironment()` - Get the current environment -- `validateConfig()` - Validate configuration with Zod schema - -### Database Configuration - -- `databaseConfig` - Database connection settings (Dragonfly, QuestDB, MongoDB, PostgreSQL) - -### Data Provider Configuration - -- `dataProviderConfigs` - Settings for market data providers - -### Risk Configuration - -- `riskConfig` - Risk management parameters (max drawdown, position size, etc.) - -### Service-Specific Configuration - -- `marketDataGatewayConfig` - Configs for the Market Data Gateway service -- `riskGuardianConfig` - Configs for the Risk Guardian service - -## Extending - -To add a new service configuration: - -1. Create a new file in `src/services/` -2. Define a Zod schema for validation -3. Create loading and default configuration functions -4. Export from `src/services/index.ts` -5. The new configuration will be automatically available from the main package - -## Development - -```bash -# Install dependencies -bun install - -# Run tests -bun test - -# Type check -bun run type-check - -# Lint -bun run lint -``` +# @stock-bot/config + +A configuration management library for the Stock Bot trading platform. + +## Overview + +This library provides a centralized way to manage configurations across all Stock Bot microservices and components. 
It includes: + +- Environment-based configuration loading +- Strong TypeScript typing and validation using Zod +- Default configurations for services +- Environment variable parsing helpers +- Service-specific configuration modules + +## Usage + +### Basic Usage + +```typescript +import { databaseConfig, dataProviderConfigs, riskConfig } from '@stock-bot/config'; + +// Access database configuration +const dragonflyHost = databaseConfig.dragonfly.host; + +// Access data provider configuration +const alpacaApiKey = dataProviderConfigs.providers.find(p => p.name === 'alpaca')?.apiKey; + +// Access risk configuration +const maxPositionSize = riskConfig.maxPositionSize; +``` + +### Service-Specific Configuration + +```typescript +import { marketDataGatewayConfig, riskGuardianConfig } from '@stock-bot/config'; + +// Access Market Data Gateway configuration +const websocketPath = marketDataGatewayConfig.websocket.path; + +// Access Risk Guardian configuration +const preTradeValidation = riskGuardianConfig.riskChecks.preTradeValidation; +``` + +### Environment Variables + +The library automatically loads environment variables from `.env` files. 
You can create environment-specific files: + +- `.env` - Base environment variables +- `.env.development` - Development-specific variables +- `.env.production` - Production-specific variables +- `.env.local` - Local overrides (not to be committed to git) + +## Configuration Modules + +### Core Configuration + +- `Environment` - Enum for different environments +- `loadEnvVariables()` - Load environment variables from .env files +- `getEnvironment()` - Get the current environment +- `validateConfig()` - Validate configuration with Zod schema + +### Database Configuration + +- `databaseConfig` - Database connection settings (Dragonfly, QuestDB, MongoDB, PostgreSQL) + +### Data Provider Configuration + +- `dataProviderConfigs` - Settings for market data providers + +### Risk Configuration + +- `riskConfig` - Risk management parameters (max drawdown, position size, etc.) + +### Service-Specific Configuration + +- `marketDataGatewayConfig` - Configs for the Market Data Gateway service +- `riskGuardianConfig` - Configs for the Risk Guardian service + +## Extending + +To add a new service configuration: + +1. Create a new file in `src/services/` +2. Define a Zod schema for validation +3. Create loading and default configuration functions +4. Export from `src/services/index.ts` +5. The new configuration will be automatically available from the main package + +## Development + +```bash +# Install dependencies +bun install + +# Run tests +bun test + +# Type check +bun run type-check + +# Lint +bun run lint +``` diff --git a/libs/config/USAGE.md b/libs/config/USAGE.md index 89fb3d2..b3fc5cc 100644 --- a/libs/config/USAGE.md +++ b/libs/config/USAGE.md @@ -1,131 +1,131 @@ -# Stock Bot Configuration Library Usage Guide - -This guide shows how to use the Zod-based configuration system in the Stock Bot platform. 
- -## Quick Start - -```typescript -import { databaseConfig, loggingConfig, riskConfig, dataProvidersConfig } from '@stock-bot/config'; - -// Access individual values -console.log(`Database: ${databaseConfig.POSTGRES_HOST}:${databaseConfig.POSTGRES_PORT}`); -console.log(`Log level: ${loggingConfig.LOG_LEVEL}`); -console.log(`Max position size: ${riskConfig.RISK_MAX_POSITION_SIZE}`); -``` - -## Environment Variables - -All configuration is driven by environment variables. You can set them in: -- `.env` files -- System environment variables -- Docker environment variables - -### Database Configuration -```bash -DB_HOST=localhost -DB_PORT=5432 -DB_NAME=stockbot -DB_USER=stockbot -DB_PASSWORD=your_password -DB_SSL=false -DB_POOL_MAX=10 -``` - -### Logging Configuration -```bash -LOG_LEVEL=info -LOG_CONSOLE=true -LOKI_HOST=localhost -LOKI_PORT=3100 -LOKI_LABELS=service=market-data-gateway,version=1.0.0 -``` - -### Risk Management Configuration -```bash -RISK_MAX_POSITION_SIZE=0.1 -RISK_DEFAULT_STOP_LOSS=0.05 -RISK_DEFAULT_TAKE_PROFIT=0.15 -RISK_CIRCUIT_BREAKER_ENABLED=true -``` - -### Data Provider Configuration -```bash -DEFAULT_DATA_PROVIDER=alpaca -ALPACA_API_KEY=your_api_key -ALPACA_API_SECRET=your_api_secret -ALPACA_ENABLED=true -POLYGON_ENABLED=false -``` - -## Advanced Usage - -### Type Safety -All configurations are fully typed: - -```typescript -import type { DatabaseConfig, LoggingConfig, RiskConfig } from '@stock-bot/config'; - -function setupDatabase(config: DatabaseConfig) { - // TypeScript knows all the available properties - return { - host: config.POSTGRES_HOST, - port: config.POSTGRES_PORT, // number - ssl: config.POSTGRES_SSL, // boolean - }; -} -``` - -### Environment Detection -```typescript -import { getEnvironment, Environment } from '@stock-bot/config'; - -const env = getEnvironment(); -if (env === Environment.Production) { - // Production-specific logic -} -``` - -### Data Provider Helpers -```typescript -import { getProviderConfig, 
getEnabledProviders, getDefaultProvider } from '@stock-bot/config'; - -// Get specific provider -const alpaca = getProviderConfig('alpaca'); - -// Get all enabled providers -const providers = getEnabledProviders(); - -// Get default provider -const defaultProvider = getDefaultProvider(); -``` - -## Configuration Files - -The library consists of these modules: - -- **core.ts** - Core utilities and environment detection -- **database.ts** - Database connection settings -- **logging.ts** - Logging and Loki configuration -- **risk.ts** - Risk management parameters -- **data-providers.ts** - Data provider settings - -## Benefits of This Approach - -1. **Zero Configuration Schema** - No complex schema definitions needed -2. **Automatic Type Inference** - TypeScript types are generated automatically -3. **Environment Variable Validation** - Invalid values are caught at startup -4. **Great Developer Experience** - IntelliSense works perfectly -5. **Production Ready** - Used by many large-scale applications - -## Migration from Previous System - -If you're migrating from the old Valibot-based system: - -```typescript -// Old way -const config = createConfigLoader('database', databaseSchema, defaultConfig)(); - -// New way -import { databaseConfig } from '@stock-bot/config'; -// That's it! No schema needed, no validation needed, no complex setup. -``` +# Stock Bot Configuration Library Usage Guide + +This guide shows how to use the Zod-based configuration system in the Stock Bot platform. + +## Quick Start + +```typescript +import { databaseConfig, loggingConfig, riskConfig, dataProvidersConfig } from '@stock-bot/config'; + +// Access individual values +console.log(`Database: ${databaseConfig.POSTGRES_HOST}:${databaseConfig.POSTGRES_PORT}`); +console.log(`Log level: ${loggingConfig.LOG_LEVEL}`); +console.log(`Max position size: ${riskConfig.RISK_MAX_POSITION_SIZE}`); +``` + +## Environment Variables + +All configuration is driven by environment variables. 
You can set them in: +- `.env` files +- System environment variables +- Docker environment variables + +### Database Configuration +```bash +DB_HOST=localhost +DB_PORT=5432 +DB_NAME=stockbot +DB_USER=stockbot +DB_PASSWORD=your_password +DB_SSL=false +DB_POOL_MAX=10 +``` + +### Logging Configuration +```bash +LOG_LEVEL=info +LOG_CONSOLE=true +LOKI_HOST=localhost +LOKI_PORT=3100 +LOKI_LABELS=service=market-data-gateway,version=1.0.0 +``` + +### Risk Management Configuration +```bash +RISK_MAX_POSITION_SIZE=0.1 +RISK_DEFAULT_STOP_LOSS=0.05 +RISK_DEFAULT_TAKE_PROFIT=0.15 +RISK_CIRCUIT_BREAKER_ENABLED=true +``` + +### Data Provider Configuration +```bash +DEFAULT_DATA_PROVIDER=alpaca +ALPACA_API_KEY=your_api_key +ALPACA_API_SECRET=your_api_secret +ALPACA_ENABLED=true +POLYGON_ENABLED=false +``` + +## Advanced Usage + +### Type Safety +All configurations are fully typed: + +```typescript +import type { DatabaseConfig, LoggingConfig, RiskConfig } from '@stock-bot/config'; + +function setupDatabase(config: DatabaseConfig) { + // TypeScript knows all the available properties + return { + host: config.POSTGRES_HOST, + port: config.POSTGRES_PORT, // number + ssl: config.POSTGRES_SSL, // boolean + }; +} +``` + +### Environment Detection +```typescript +import { getEnvironment, Environment } from '@stock-bot/config'; + +const env = getEnvironment(); +if (env === Environment.Production) { + // Production-specific logic +} +``` + +### Data Provider Helpers +```typescript +import { getProviderConfig, getEnabledProviders, getDefaultProvider } from '@stock-bot/config'; + +// Get specific provider +const alpaca = getProviderConfig('alpaca'); + +// Get all enabled providers +const providers = getEnabledProviders(); + +// Get default provider +const defaultProvider = getDefaultProvider(); +``` + +## Configuration Files + +The library consists of these modules: + +- **core.ts** - Core utilities and environment detection +- **database.ts** - Database connection settings +- **logging.ts** 
- Logging and Loki configuration +- **risk.ts** - Risk management parameters +- **data-providers.ts** - Data provider settings + +## Benefits of This Approach + +1. **Zero Configuration Schema** - No complex schema definitions needed +2. **Automatic Type Inference** - TypeScript types are generated automatically +3. **Environment Variable Validation** - Invalid values are caught at startup +4. **Great Developer Experience** - IntelliSense works perfectly +5. **Production Ready** - Used by many large-scale applications + +## Migration from Previous System + +If you're migrating from the old Valibot-based system: + +```typescript +// Old way +const config = createConfigLoader('database', databaseSchema, defaultConfig)(); + +// New way +import { databaseConfig } from '@stock-bot/config'; +// That's it! No schema needed, no validation needed, no complex setup. +``` diff --git a/libs/config/bunfig.toml b/libs/config/bunfig.toml index 9e452c2..5c300fc 100644 --- a/libs/config/bunfig.toml +++ b/libs/config/bunfig.toml @@ -1,15 +1,15 @@ -[test] -# Configure path mapping for tests -preload = ["./test/setup.ts"] - -# Test configuration -timeout = 5000 - -# Set test environment -env = { NODE_ENV = "test" } - -[bun] -# Enable TypeScript paths resolution -paths = { - "@/*" = ["./src/*"] -} +[test] +# Configure path mapping for tests +preload = ["./test/setup.ts"] + +# Test configuration +timeout = 5000 + +# Set test environment +env = { NODE_ENV = "test" } + +[bun] +# Enable TypeScript paths resolution +paths = { + "@/*" = ["./src/*"] +} diff --git a/libs/config/package.json b/libs/config/package.json index 29f4fc6..96c5e4b 100644 --- a/libs/config/package.json +++ b/libs/config/package.json @@ -1,44 +1,44 @@ -{ - "name": "@stock-bot/config", - "version": "1.0.0", - "description": "Configuration management library for Stock Bot platform", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "test": "bun test", - "lint": 
"eslint src/**/*.ts", - "type-check": "tsc --noEmit", - "clean": "rimraf dist" - }, - "dependencies": { - "dotenv": "^16.5.0", - "yup": "^1.6.1" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "typescript": "^5.3.0", - "eslint": "^8.56.0", - "@typescript-eslint/eslint-plugin": "^6.19.0", - "@typescript-eslint/parser": "^6.19.0", - "bun-types": "^1.2.15" - }, - "keywords": [ - "configuration", - "settings", - "env", - "stock-bot" - ], - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ] -} +{ + "name": "@stock-bot/config", + "version": "1.0.0", + "description": "Configuration management library for Stock Bot platform", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsc", + "test": "bun test", + "lint": "eslint src/**/*.ts", + "type-check": "tsc --noEmit", + "clean": "rimraf dist" + }, + "dependencies": { + "dotenv": "^16.5.0", + "yup": "^1.6.1" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "typescript": "^5.3.0", + "eslint": "^8.56.0", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": "^6.19.0", + "bun-types": "^1.2.15" + }, + "keywords": [ + "configuration", + "settings", + "env", + "stock-bot" + ], + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist", + "README.md" + ] +} diff --git a/libs/config/setup.bat b/libs/config/setup.bat index 9b3e9b2..a2cc8b7 100644 --- a/libs/config/setup.bat +++ b/libs/config/setup.bat @@ -1,24 +1,24 @@ -@echo off -echo Building @stock-bot/config library... - -cd /d g:\repos\stock-bot -echo Installing dependencies... -bun install - -echo Running type check... -cd /d g:\repos\stock-bot\libs\config -bun run type-check - -echo Running tests... -bun test - -echo Setting up example configuration... 
-copy .env.example .env - -echo Running example to display configuration... -bun run src/example.ts - -echo. -echo Configuration library setup complete! -echo. -echo You can now import @stock-bot/config in your services. +@echo off +echo Building @stock-bot/config library... + +cd /d g:\repos\stock-bot +echo Installing dependencies... +bun install + +echo Running type check... +cd /d g:\repos\stock-bot\libs\config +bun run type-check + +echo Running tests... +bun test + +echo Setting up example configuration... +copy .env.example .env + +echo Running example to display configuration... +bun run src/example.ts + +echo. +echo Configuration library setup complete! +echo. +echo You can now import @stock-bot/config in your services. diff --git a/libs/config/src/admin-interfaces.ts b/libs/config/src/admin-interfaces.ts index 6a2b3e5..74a9b6d 100644 --- a/libs/config/src/admin-interfaces.ts +++ b/libs/config/src/admin-interfaces.ts @@ -1,111 +1,111 @@ -/** - * Admin interfaces configuration using Yup - * PgAdmin, Mongo Express, Redis Insight for database management - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, port, bool, strWithChoices } = envValidators; - -/** - * PgAdmin configuration with validation and defaults - */ -export const pgAdminConfig = cleanEnv(process.env, { - // PgAdmin Server - PGADMIN_HOST: str('localhost', 'PgAdmin host'), - PGADMIN_PORT: port(8080, 'PgAdmin port'), - - // Authentication - PGADMIN_DEFAULT_EMAIL: str('admin@tradingbot.local', 'PgAdmin default admin email'), - PGADMIN_DEFAULT_PASSWORD: str('admin123', 'PgAdmin default admin password'), - - // Configuration - PGADMIN_SERVER_MODE: bool(false, 'Enable server mode (multi-user)'), - PGADMIN_DISABLE_POSTFIX: bool(true, 'Disable postfix for email'), - PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION: bool(true, 'Enhanced cookie protection'), - - // Security - PGADMIN_MASTER_PASSWORD_REQUIRED: bool(false, 'Require master password'), - PGADMIN_SESSION_TIMEOUT: str('60', 
'Session timeout in minutes'), -}); - -/** - * Mongo Express configuration with validation and defaults - */ -export const mongoExpressConfig = cleanEnv(process.env, { - // Mongo Express Server - MONGO_EXPRESS_HOST: str('localhost', 'Mongo Express host'), - MONGO_EXPRESS_PORT: port(8081, 'Mongo Express port'), - - // MongoDB Connection - MONGO_EXPRESS_MONGODB_SERVER: str('mongodb', 'MongoDB server name/host'), - MONGO_EXPRESS_MONGODB_PORT: port(27017, 'MongoDB port'), - MONGO_EXPRESS_MONGODB_ADMINUSERNAME: str('trading_admin', 'MongoDB admin username'), - MONGO_EXPRESS_MONGODB_ADMINPASSWORD: str('', 'MongoDB admin password'), - - // Basic Authentication for Mongo Express - MONGO_EXPRESS_BASICAUTH_USERNAME: str('admin', 'Basic auth username for Mongo Express'), - MONGO_EXPRESS_BASICAUTH_PASSWORD: str('admin123', 'Basic auth password for Mongo Express'), - - // Configuration - MONGO_EXPRESS_ENABLE_ADMIN: bool(true, 'Enable admin features'), - MONGO_EXPRESS_OPTIONS_EDITOR_THEME: str('rubyblue', 'Editor theme (rubyblue, 3024-night, etc.)'), - MONGO_EXPRESS_REQUEST_SIZE: str('100kb', 'Maximum request size'), -}); - -/** - * Redis Insight configuration with validation and defaults - */ -export const redisInsightConfig = cleanEnv(process.env, { - // Redis Insight Server - REDIS_INSIGHT_HOST: str('localhost', 'Redis Insight host'), - REDIS_INSIGHT_PORT: port(8001, 'Redis Insight port'), - - // Redis Connection Settings - REDIS_INSIGHT_REDIS_HOSTS: str('local:dragonfly:6379', 'Redis hosts in format name:host:port,name:host:port'), - - // Configuration - REDIS_INSIGHT_LOG_LEVEL: strWithChoices(['error', 'warn', 'info', 'verbose', 'debug'], 'info', 'Redis Insight log level'), - REDIS_INSIGHT_DISABLE_ANALYTICS: bool(true, 'Disable analytics collection'), - REDIS_INSIGHT_BUILD_TYPE: str('DOCKER', 'Build type identifier'), -}); - -// Export typed configuration objects -export type PgAdminConfig = typeof pgAdminConfig; -export type MongoExpressConfig = typeof mongoExpressConfig; 
-export type RedisInsightConfig = typeof redisInsightConfig; - -// Export individual config values for convenience -export const { - PGADMIN_HOST, - PGADMIN_PORT, - PGADMIN_DEFAULT_EMAIL, - PGADMIN_DEFAULT_PASSWORD, - PGADMIN_SERVER_MODE, - PGADMIN_DISABLE_POSTFIX, - PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION, - PGADMIN_MASTER_PASSWORD_REQUIRED, - PGADMIN_SESSION_TIMEOUT, -} = pgAdminConfig; - -export const { - MONGO_EXPRESS_HOST, - MONGO_EXPRESS_PORT, - MONGO_EXPRESS_MONGODB_SERVER, - MONGO_EXPRESS_MONGODB_PORT, - MONGO_EXPRESS_MONGODB_ADMINUSERNAME, - MONGO_EXPRESS_MONGODB_ADMINPASSWORD, - MONGO_EXPRESS_BASICAUTH_USERNAME, - MONGO_EXPRESS_BASICAUTH_PASSWORD, - MONGO_EXPRESS_ENABLE_ADMIN, - MONGO_EXPRESS_OPTIONS_EDITOR_THEME, - MONGO_EXPRESS_REQUEST_SIZE, -} = mongoExpressConfig; - -export const { - REDIS_INSIGHT_HOST, - REDIS_INSIGHT_PORT, - REDIS_INSIGHT_REDIS_HOSTS, - REDIS_INSIGHT_LOG_LEVEL, - REDIS_INSIGHT_DISABLE_ANALYTICS, - REDIS_INSIGHT_BUILD_TYPE, -} = redisInsightConfig; +/** + * Admin interfaces configuration using Yup + * PgAdmin, Mongo Express, Redis Insight for database management + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, port, bool, strWithChoices } = envValidators; + +/** + * PgAdmin configuration with validation and defaults + */ +export const pgAdminConfig = cleanEnv(process.env, { + // PgAdmin Server + PGADMIN_HOST: str('localhost', 'PgAdmin host'), + PGADMIN_PORT: port(8080, 'PgAdmin port'), + + // Authentication + PGADMIN_DEFAULT_EMAIL: str('admin@tradingbot.local', 'PgAdmin default admin email'), + PGADMIN_DEFAULT_PASSWORD: str('admin123', 'PgAdmin default admin password'), + + // Configuration + PGADMIN_SERVER_MODE: bool(false, 'Enable server mode (multi-user)'), + PGADMIN_DISABLE_POSTFIX: bool(true, 'Disable postfix for email'), + PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION: bool(true, 'Enhanced cookie protection'), + + // Security + PGADMIN_MASTER_PASSWORD_REQUIRED: bool(false, 'Require master password'), + 
PGADMIN_SESSION_TIMEOUT: str('60', 'Session timeout in minutes'), +}); + +/** + * Mongo Express configuration with validation and defaults + */ +export const mongoExpressConfig = cleanEnv(process.env, { + // Mongo Express Server + MONGO_EXPRESS_HOST: str('localhost', 'Mongo Express host'), + MONGO_EXPRESS_PORT: port(8081, 'Mongo Express port'), + + // MongoDB Connection + MONGO_EXPRESS_MONGODB_SERVER: str('mongodb', 'MongoDB server name/host'), + MONGO_EXPRESS_MONGODB_PORT: port(27017, 'MongoDB port'), + MONGO_EXPRESS_MONGODB_ADMINUSERNAME: str('trading_admin', 'MongoDB admin username'), + MONGO_EXPRESS_MONGODB_ADMINPASSWORD: str('', 'MongoDB admin password'), + + // Basic Authentication for Mongo Express + MONGO_EXPRESS_BASICAUTH_USERNAME: str('admin', 'Basic auth username for Mongo Express'), + MONGO_EXPRESS_BASICAUTH_PASSWORD: str('admin123', 'Basic auth password for Mongo Express'), + + // Configuration + MONGO_EXPRESS_ENABLE_ADMIN: bool(true, 'Enable admin features'), + MONGO_EXPRESS_OPTIONS_EDITOR_THEME: str('rubyblue', 'Editor theme (rubyblue, 3024-night, etc.)'), + MONGO_EXPRESS_REQUEST_SIZE: str('100kb', 'Maximum request size'), +}); + +/** + * Redis Insight configuration with validation and defaults + */ +export const redisInsightConfig = cleanEnv(process.env, { + // Redis Insight Server + REDIS_INSIGHT_HOST: str('localhost', 'Redis Insight host'), + REDIS_INSIGHT_PORT: port(8001, 'Redis Insight port'), + + // Redis Connection Settings + REDIS_INSIGHT_REDIS_HOSTS: str('local:dragonfly:6379', 'Redis hosts in format name:host:port,name:host:port'), + + // Configuration + REDIS_INSIGHT_LOG_LEVEL: strWithChoices(['error', 'warn', 'info', 'verbose', 'debug'], 'info', 'Redis Insight log level'), + REDIS_INSIGHT_DISABLE_ANALYTICS: bool(true, 'Disable analytics collection'), + REDIS_INSIGHT_BUILD_TYPE: str('DOCKER', 'Build type identifier'), +}); + +// Export typed configuration objects +export type PgAdminConfig = typeof pgAdminConfig; +export type 
MongoExpressConfig = typeof mongoExpressConfig; +export type RedisInsightConfig = typeof redisInsightConfig; + +// Export individual config values for convenience +export const { + PGADMIN_HOST, + PGADMIN_PORT, + PGADMIN_DEFAULT_EMAIL, + PGADMIN_DEFAULT_PASSWORD, + PGADMIN_SERVER_MODE, + PGADMIN_DISABLE_POSTFIX, + PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION, + PGADMIN_MASTER_PASSWORD_REQUIRED, + PGADMIN_SESSION_TIMEOUT, +} = pgAdminConfig; + +export const { + MONGO_EXPRESS_HOST, + MONGO_EXPRESS_PORT, + MONGO_EXPRESS_MONGODB_SERVER, + MONGO_EXPRESS_MONGODB_PORT, + MONGO_EXPRESS_MONGODB_ADMINUSERNAME, + MONGO_EXPRESS_MONGODB_ADMINPASSWORD, + MONGO_EXPRESS_BASICAUTH_USERNAME, + MONGO_EXPRESS_BASICAUTH_PASSWORD, + MONGO_EXPRESS_ENABLE_ADMIN, + MONGO_EXPRESS_OPTIONS_EDITOR_THEME, + MONGO_EXPRESS_REQUEST_SIZE, +} = mongoExpressConfig; + +export const { + REDIS_INSIGHT_HOST, + REDIS_INSIGHT_PORT, + REDIS_INSIGHT_REDIS_HOSTS, + REDIS_INSIGHT_LOG_LEVEL, + REDIS_INSIGHT_DISABLE_ANALYTICS, + REDIS_INSIGHT_BUILD_TYPE, +} = redisInsightConfig; diff --git a/libs/config/src/core.ts b/libs/config/src/core.ts index b40534e..2e9e5fb 100644 --- a/libs/config/src/core.ts +++ b/libs/config/src/core.ts @@ -1,68 +1,68 @@ -/** - * Core configuration module for the Stock Bot platform using Yup - */ -import { config as dotenvConfig } from 'dotenv'; -import path from 'node:path'; - -/** - * Represents an error related to configuration validation - */ -export class ConfigurationError extends Error { - constructor(message: string) { - super(message); - this.name = 'ConfigurationError'; - } -} - -/** - * Environment types - */ -export enum Environment { - Development = 'development', - Testing = 'testing', - Staging = 'staging', - Production = 'production' -} - -/** - * Loads environment variables from .env files based on the current environment - */ -export function loadEnvVariables(envOverride?: string): void { - const env = envOverride || process.env.NODE_ENV || 'development'; - 
console.log(`Current environment: ${env}`); - // Order of loading: - // 1. .env (base environment variables) - // 2. .env.{environment} (environment-specific variables) - // 3. .env.local (local overrides, not to be committed) - - const envFiles = [ - '.env', - `.env.${env}`, - '.env.local' - ]; - - for (const file of envFiles) { - dotenvConfig({ path: path.resolve(process.cwd(), file) }); - } -} - -/** - * Gets the current environment from process.env.NODE_ENV - */ -export function getEnvironment(): Environment { - const env = process.env.NODE_ENV?.toLowerCase() || 'development'; - switch (env) { - case 'development': - return Environment.Development; - case 'testing': - case 'test': // Handle both 'test' and 'testing' for compatibility - return Environment.Testing; - case 'staging': - return Environment.Staging; - case 'production': - return Environment.Production; - default: - return Environment.Development; - - } -} +/** + * Core configuration module for the Stock Bot platform using Yup + */ +import { config as dotenvConfig } from 'dotenv'; +import path from 'node:path'; + +/** + * Represents an error related to configuration validation + */ +export class ConfigurationError extends Error { + constructor(message: string) { + super(message); + this.name = 'ConfigurationError'; + } +} + +/** + * Environment types + */ +export enum Environment { + Development = 'development', + Testing = 'testing', + Staging = 'staging', + Production = 'production' +} + +/** + * Loads environment variables from .env files based on the current environment + */ +export function loadEnvVariables(envOverride?: string): void { + const env = envOverride || process.env.NODE_ENV || 'development'; + console.log(`Current environment: ${env}`); + // Order of loading: + // 1. .env (base environment variables) + // 2. .env.{environment} (environment-specific variables) + // 3. 
.env.local (local overrides, not to be committed) + + const envFiles = [ + '.env', + `.env.${env}`, + '.env.local' + ]; + + for (const file of envFiles) { + dotenvConfig({ path: path.resolve(process.cwd(), file) }); + } +} + +/** + * Gets the current environment from process.env.NODE_ENV + */ +export function getEnvironment(): Environment { + const env = process.env.NODE_ENV?.toLowerCase() || 'development'; + switch (env) { + case 'development': + return Environment.Development; + case 'testing': + case 'test': // Handle both 'test' and 'testing' for compatibility + return Environment.Testing; + case 'staging': + return Environment.Staging; + case 'production': + return Environment.Production; + default: + return Environment.Development; + + } +} diff --git a/libs/config/src/data-providers.ts b/libs/config/src/data-providers.ts index 30b911a..02eef3a 100644 --- a/libs/config/src/data-providers.ts +++ b/libs/config/src/data-providers.ts @@ -1,184 +1,184 @@ -/** - * Data provider configurations using Yup - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, num, bool, strWithChoices } = envValidators; - -export interface ProviderConfig { - name: string; - type: 'rest' | 'websocket'; - enabled: boolean; - baseUrl?: string; - apiKey?: string; - apiSecret?: string; - rateLimits?: { - maxRequestsPerMinute?: number; - maxRequestsPerSecond?: number; - maxRequestsPerHour?: number; - }; -} -/** - * Data providers configuration with validation and defaults - */ -export const dataProvidersConfig = cleanEnv(process.env, { - // Default Provider - DEFAULT_DATA_PROVIDER: strWithChoices(['alpaca', 'polygon', 'yahoo', 'iex'], 'alpaca', 'Default data provider'), - - // Alpaca Configuration - ALPACA_API_KEY: str('', 'Alpaca API key'), - ALPACA_API_SECRET: str('', 'Alpaca API secret'), - ALPACA_BASE_URL: str('https://data.alpaca.markets/v1beta1', 'Alpaca base URL'), - ALPACA_RATE_LIMIT: num(200, 'Alpaca rate limit per minute'), - ALPACA_ENABLED: bool(true, 
'Enable Alpaca provider'), - - // Polygon Configuration - POLYGON_API_KEY: str('', 'Polygon API key'), - POLYGON_BASE_URL: str('https://api.polygon.io', 'Polygon base URL'), - POLYGON_RATE_LIMIT: num(5, 'Polygon rate limit per minute'), - POLYGON_ENABLED: bool(false, 'Enable Polygon provider'), - - // Yahoo Finance Configuration - YAHOO_BASE_URL: str('https://query1.finance.yahoo.com', 'Yahoo Finance base URL'), - YAHOO_RATE_LIMIT: num(2000, 'Yahoo Finance rate limit per hour'), - YAHOO_ENABLED: bool(true, 'Enable Yahoo Finance provider'), - - // IEX Cloud Configuration - IEX_API_KEY: str('', 'IEX Cloud API key'), - IEX_BASE_URL: str('https://cloud.iexapis.com/stable', 'IEX Cloud base URL'), - IEX_RATE_LIMIT: num(100, 'IEX Cloud rate limit per second'), - IEX_ENABLED: bool(false, 'Enable IEX Cloud provider'), - - // Connection Settings - DATA_PROVIDER_TIMEOUT: num(30000, 'Request timeout in milliseconds'), - DATA_PROVIDER_RETRIES: num(3, 'Number of retry attempts'), - DATA_PROVIDER_RETRY_DELAY: num(1000, 'Retry delay in milliseconds'), - - // Cache Settings - DATA_CACHE_ENABLED: bool(true, 'Enable data caching'), - DATA_CACHE_TTL: num(300000, 'Cache TTL in milliseconds'), - DATA_CACHE_MAX_SIZE: num(1000, 'Maximum cache entries'), -}); - -/** - * Helper function to get provider-specific configuration - */ -export function getProviderConfig(providerName: string) { - // make a interface for the provider config - - const name = providerName.toUpperCase(); - - switch (name) { - case 'ALPACA': - return { - name: 'alpaca', - type: 'rest' as const, - enabled: dataProvidersConfig.ALPACA_ENABLED, - baseUrl: dataProvidersConfig.ALPACA_BASE_URL, - apiKey: dataProvidersConfig.ALPACA_API_KEY, - apiSecret: dataProvidersConfig.ALPACA_API_SECRET, - rateLimits: { - maxRequestsPerMinute: dataProvidersConfig.ALPACA_RATE_LIMIT - } - }; - - case 'POLYGON': - return { - name: 'polygon', - type: 'rest' as const, - enabled: dataProvidersConfig.POLYGON_ENABLED, - baseUrl: 
dataProvidersConfig.POLYGON_BASE_URL, - apiKey: dataProvidersConfig.POLYGON_API_KEY, - rateLimits: { - maxRequestsPerMinute: dataProvidersConfig.POLYGON_RATE_LIMIT - } - }; - - case 'YAHOO': - return { - name: 'yahoo', - type: 'rest' as const, - enabled: dataProvidersConfig.YAHOO_ENABLED, - baseUrl: dataProvidersConfig.YAHOO_BASE_URL, - rateLimits: { - maxRequestsPerHour: dataProvidersConfig.YAHOO_RATE_LIMIT - } - }; - - case 'IEX': - return { - name: 'iex', - type: 'rest' as const, - enabled: dataProvidersConfig.IEX_ENABLED, - baseUrl: dataProvidersConfig.IEX_BASE_URL, - apiKey: dataProvidersConfig.IEX_API_KEY, - rateLimits: { - maxRequestsPerSecond: dataProvidersConfig.IEX_RATE_LIMIT - } - }; - - default: - throw new Error(`Unknown provider: ${providerName}`); - } -} - -/** - * Get all enabled providers - */ -export function getEnabledProviders() { - const providers = ['alpaca', 'polygon', 'yahoo', 'iex']; - return providers - .map(provider => getProviderConfig(provider)) - .filter(config => config.enabled); -} - -/** - * Get the default provider configuration - */ -export function getDefaultProvider() { - return getProviderConfig(dataProvidersConfig.DEFAULT_DATA_PROVIDER); -} - -// Export typed configuration object -export type DataProvidersConfig = typeof dataProvidersConfig; -export class DataProviders { - static getProviderConfig(providerName: string): ProviderConfig { - return getProviderConfig(providerName); - } - - static getEnabledProviders(): ProviderConfig[] { - return getEnabledProviders(); - } - - static getDefaultProvider(): ProviderConfig { - return getDefaultProvider(); - } -} - - -// Export individual config values for convenience -export const { - DEFAULT_DATA_PROVIDER, - ALPACA_API_KEY, - ALPACA_API_SECRET, - ALPACA_BASE_URL, - ALPACA_RATE_LIMIT, - ALPACA_ENABLED, - POLYGON_API_KEY, - POLYGON_BASE_URL, - POLYGON_RATE_LIMIT, - POLYGON_ENABLED, - YAHOO_BASE_URL, - YAHOO_RATE_LIMIT, - YAHOO_ENABLED, - IEX_API_KEY, - IEX_BASE_URL, - IEX_RATE_LIMIT, 
- IEX_ENABLED, - DATA_PROVIDER_TIMEOUT, - DATA_PROVIDER_RETRIES, - DATA_PROVIDER_RETRY_DELAY, - DATA_CACHE_ENABLED, - DATA_CACHE_TTL, - DATA_CACHE_MAX_SIZE, -} = dataProvidersConfig; +/** + * Data provider configurations using Yup + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, num, bool, strWithChoices } = envValidators; + +export interface ProviderConfig { + name: string; + type: 'rest' | 'websocket'; + enabled: boolean; + baseUrl?: string; + apiKey?: string; + apiSecret?: string; + rateLimits?: { + maxRequestsPerMinute?: number; + maxRequestsPerSecond?: number; + maxRequestsPerHour?: number; + }; +} +/** + * Data providers configuration with validation and defaults + */ +export const dataProvidersConfig = cleanEnv(process.env, { + // Default Provider + DEFAULT_DATA_PROVIDER: strWithChoices(['alpaca', 'polygon', 'yahoo', 'iex'], 'alpaca', 'Default data provider'), + + // Alpaca Configuration + ALPACA_API_KEY: str('', 'Alpaca API key'), + ALPACA_API_SECRET: str('', 'Alpaca API secret'), + ALPACA_BASE_URL: str('https://data.alpaca.markets/v1beta1', 'Alpaca base URL'), + ALPACA_RATE_LIMIT: num(200, 'Alpaca rate limit per minute'), + ALPACA_ENABLED: bool(true, 'Enable Alpaca provider'), + + // Polygon Configuration + POLYGON_API_KEY: str('', 'Polygon API key'), + POLYGON_BASE_URL: str('https://api.polygon.io', 'Polygon base URL'), + POLYGON_RATE_LIMIT: num(5, 'Polygon rate limit per minute'), + POLYGON_ENABLED: bool(false, 'Enable Polygon provider'), + + // Yahoo Finance Configuration + YAHOO_BASE_URL: str('https://query1.finance.yahoo.com', 'Yahoo Finance base URL'), + YAHOO_RATE_LIMIT: num(2000, 'Yahoo Finance rate limit per hour'), + YAHOO_ENABLED: bool(true, 'Enable Yahoo Finance provider'), + + // IEX Cloud Configuration + IEX_API_KEY: str('', 'IEX Cloud API key'), + IEX_BASE_URL: str('https://cloud.iexapis.com/stable', 'IEX Cloud base URL'), + IEX_RATE_LIMIT: num(100, 'IEX Cloud rate limit per second'), + IEX_ENABLED: bool(false, 
'Enable IEX Cloud provider'), + + // Connection Settings + DATA_PROVIDER_TIMEOUT: num(30000, 'Request timeout in milliseconds'), + DATA_PROVIDER_RETRIES: num(3, 'Number of retry attempts'), + DATA_PROVIDER_RETRY_DELAY: num(1000, 'Retry delay in milliseconds'), + + // Cache Settings + DATA_CACHE_ENABLED: bool(true, 'Enable data caching'), + DATA_CACHE_TTL: num(300000, 'Cache TTL in milliseconds'), + DATA_CACHE_MAX_SIZE: num(1000, 'Maximum cache entries'), +}); + +/** + * Helper function to get provider-specific configuration + */ +export function getProviderConfig(providerName: string) { + // make a interface for the provider config + + const name = providerName.toUpperCase(); + + switch (name) { + case 'ALPACA': + return { + name: 'alpaca', + type: 'rest' as const, + enabled: dataProvidersConfig.ALPACA_ENABLED, + baseUrl: dataProvidersConfig.ALPACA_BASE_URL, + apiKey: dataProvidersConfig.ALPACA_API_KEY, + apiSecret: dataProvidersConfig.ALPACA_API_SECRET, + rateLimits: { + maxRequestsPerMinute: dataProvidersConfig.ALPACA_RATE_LIMIT + } + }; + + case 'POLYGON': + return { + name: 'polygon', + type: 'rest' as const, + enabled: dataProvidersConfig.POLYGON_ENABLED, + baseUrl: dataProvidersConfig.POLYGON_BASE_URL, + apiKey: dataProvidersConfig.POLYGON_API_KEY, + rateLimits: { + maxRequestsPerMinute: dataProvidersConfig.POLYGON_RATE_LIMIT + } + }; + + case 'YAHOO': + return { + name: 'yahoo', + type: 'rest' as const, + enabled: dataProvidersConfig.YAHOO_ENABLED, + baseUrl: dataProvidersConfig.YAHOO_BASE_URL, + rateLimits: { + maxRequestsPerHour: dataProvidersConfig.YAHOO_RATE_LIMIT + } + }; + + case 'IEX': + return { + name: 'iex', + type: 'rest' as const, + enabled: dataProvidersConfig.IEX_ENABLED, + baseUrl: dataProvidersConfig.IEX_BASE_URL, + apiKey: dataProvidersConfig.IEX_API_KEY, + rateLimits: { + maxRequestsPerSecond: dataProvidersConfig.IEX_RATE_LIMIT + } + }; + + default: + throw new Error(`Unknown provider: ${providerName}`); + } +} + +/** + * Get all enabled 
providers + */ +export function getEnabledProviders() { + const providers = ['alpaca', 'polygon', 'yahoo', 'iex']; + return providers + .map(provider => getProviderConfig(provider)) + .filter(config => config.enabled); +} + +/** + * Get the default provider configuration + */ +export function getDefaultProvider() { + return getProviderConfig(dataProvidersConfig.DEFAULT_DATA_PROVIDER); +} + +// Export typed configuration object +export type DataProvidersConfig = typeof dataProvidersConfig; +export class DataProviders { + static getProviderConfig(providerName: string): ProviderConfig { + return getProviderConfig(providerName); + } + + static getEnabledProviders(): ProviderConfig[] { + return getEnabledProviders(); + } + + static getDefaultProvider(): ProviderConfig { + return getDefaultProvider(); + } +} + + +// Export individual config values for convenience +export const { + DEFAULT_DATA_PROVIDER, + ALPACA_API_KEY, + ALPACA_API_SECRET, + ALPACA_BASE_URL, + ALPACA_RATE_LIMIT, + ALPACA_ENABLED, + POLYGON_API_KEY, + POLYGON_BASE_URL, + POLYGON_RATE_LIMIT, + POLYGON_ENABLED, + YAHOO_BASE_URL, + YAHOO_RATE_LIMIT, + YAHOO_ENABLED, + IEX_API_KEY, + IEX_BASE_URL, + IEX_RATE_LIMIT, + IEX_ENABLED, + DATA_PROVIDER_TIMEOUT, + DATA_PROVIDER_RETRIES, + DATA_PROVIDER_RETRY_DELAY, + DATA_CACHE_ENABLED, + DATA_CACHE_TTL, + DATA_CACHE_MAX_SIZE, +} = dataProvidersConfig; diff --git a/libs/config/src/database.ts b/libs/config/src/database.ts index b95f950..ef02f7b 100644 --- a/libs/config/src/database.ts +++ b/libs/config/src/database.ts @@ -1,56 +1,56 @@ -/** - * Database configuration using Yup - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, port, num, bool } = envValidators; - -/** - * Database configuration with validation and defaults - */ -export const databaseConfig = cleanEnv(process.env, { - // PostgreSQL Configuration - DB_HOST: str('localhost', 'Database host'), - DB_PORT: port(5432, 'Database port'), - DB_NAME: str('stockbot', 'Database name'), 
- DB_USER: str('stockbot', 'Database user'), - DB_PASSWORD: str('', 'Database password'), - - // Connection Pool Settings - DB_POOL_MIN: num(2, 'Minimum pool connections'), - DB_POOL_MAX: num(10, 'Maximum pool connections'), - DB_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'), - - // SSL Configuration - DB_SSL: bool(false, 'Enable SSL for database connection'), - DB_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'), - - // Additional Settings - DB_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'), - DB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'), - DB_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'), - DB_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'), - DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'), -}); - -// Export typed configuration object -export type DatabaseConfig = typeof databaseConfig; - -// Export individual config values for convenience -export const { - DB_HOST, - DB_PORT, - DB_NAME, - DB_USER, - DB_PASSWORD, - DB_POOL_MIN, - DB_POOL_MAX, - DB_POOL_IDLE_TIMEOUT, - DB_SSL, - DB_SSL_REJECT_UNAUTHORIZED, - DB_QUERY_TIMEOUT, - DB_CONNECTION_TIMEOUT, - DB_STATEMENT_TIMEOUT, - DB_LOCK_TIMEOUT, - DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT, -} = databaseConfig; +/** + * Database configuration using Yup + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, port, num, bool } = envValidators; + +/** + * Database configuration with validation and defaults + */ +export const databaseConfig = cleanEnv(process.env, { + // PostgreSQL Configuration + DB_HOST: str('localhost', 'Database host'), + DB_PORT: port(5432, 'Database port'), + DB_NAME: str('stockbot', 'Database name'), + DB_USER: str('stockbot', 'Database user'), + DB_PASSWORD: str('', 'Database password'), + + // Connection Pool Settings + DB_POOL_MIN: num(2, 'Minimum pool connections'), + DB_POOL_MAX: num(10, 'Maximum pool connections'), + DB_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout 
in ms'), + + // SSL Configuration + DB_SSL: bool(false, 'Enable SSL for database connection'), + DB_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'), + + // Additional Settings + DB_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'), + DB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'), + DB_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'), + DB_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'), + DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'), +}); + +// Export typed configuration object +export type DatabaseConfig = typeof databaseConfig; + +// Export individual config values for convenience +export const { + DB_HOST, + DB_PORT, + DB_NAME, + DB_USER, + DB_PASSWORD, + DB_POOL_MIN, + DB_POOL_MAX, + DB_POOL_IDLE_TIMEOUT, + DB_SSL, + DB_SSL_REJECT_UNAUTHORIZED, + DB_QUERY_TIMEOUT, + DB_CONNECTION_TIMEOUT, + DB_STATEMENT_TIMEOUT, + DB_LOCK_TIMEOUT, + DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT, +} = databaseConfig; diff --git a/libs/config/src/dragonfly.ts b/libs/config/src/dragonfly.ts index a9718b5..99e2ee4 100644 --- a/libs/config/src/dragonfly.ts +++ b/libs/config/src/dragonfly.ts @@ -1,81 +1,81 @@ -/** - * Dragonfly (Redis replacement) configuration using Yup - * High-performance caching and event streaming - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, port, num, bool } = envValidators; - -/** - * Dragonfly configuration with validation and defaults - */ -export const dragonflyConfig = cleanEnv(process.env, { - // Dragonfly Connection - DRAGONFLY_HOST: str('localhost', 'Dragonfly host'), - DRAGONFLY_PORT: port(6379, 'Dragonfly port'), - DRAGONFLY_PASSWORD: str('', 'Dragonfly password (if auth enabled)'), - DRAGONFLY_USERNAME: str('', 'Dragonfly username (if ACL enabled)'), - - // Database Selection - DRAGONFLY_DATABASE: num(0, 'Dragonfly database number (0-15)'), - - // Connection Pool Settings - DRAGONFLY_MAX_RETRIES: num(3, 'Maximum retry attempts'), - 
DRAGONFLY_RETRY_DELAY: num(50, 'Retry delay in ms'), - DRAGONFLY_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'), - DRAGONFLY_COMMAND_TIMEOUT: num(5000, 'Command timeout in ms'), - - // Pool Configuration - DRAGONFLY_POOL_SIZE: num(10, 'Connection pool size'), - DRAGONFLY_POOL_MIN: num(1, 'Minimum pool connections'), - DRAGONFLY_POOL_MAX: num(20, 'Maximum pool connections'), - - // TLS Settings - DRAGONFLY_TLS: bool(false, 'Enable TLS for Dragonfly connection'), - DRAGONFLY_TLS_CERT_FILE: str('', 'Path to TLS certificate file'), - DRAGONFLY_TLS_KEY_FILE: str('', 'Path to TLS key file'), - DRAGONFLY_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'), - DRAGONFLY_TLS_SKIP_VERIFY: bool(false, 'Skip TLS certificate verification'), - - // Performance Settings - DRAGONFLY_ENABLE_KEEPALIVE: bool(true, 'Enable TCP keepalive'), - DRAGONFLY_KEEPALIVE_INTERVAL: num(60, 'Keepalive interval in seconds'), - - // Clustering (if using cluster mode) - DRAGONFLY_CLUSTER_MODE: bool(false, 'Enable cluster mode'), - DRAGONFLY_CLUSTER_NODES: str('', 'Comma-separated list of cluster nodes (host:port)'), - - // Memory and Cache Settings - DRAGONFLY_MAX_MEMORY: str('2gb', 'Maximum memory usage'), - DRAGONFLY_CACHE_MODE: bool(true, 'Enable cache mode'), -}); - -// Export typed configuration object -export type DragonflyConfig = typeof dragonflyConfig; - -// Export individual config values for convenience -export const { - DRAGONFLY_HOST, - DRAGONFLY_PORT, - DRAGONFLY_PASSWORD, - DRAGONFLY_USERNAME, - DRAGONFLY_DATABASE, - DRAGONFLY_MAX_RETRIES, - DRAGONFLY_RETRY_DELAY, - DRAGONFLY_CONNECT_TIMEOUT, - DRAGONFLY_COMMAND_TIMEOUT, - DRAGONFLY_POOL_SIZE, - DRAGONFLY_POOL_MIN, - DRAGONFLY_POOL_MAX, - DRAGONFLY_TLS, - DRAGONFLY_TLS_CERT_FILE, - DRAGONFLY_TLS_KEY_FILE, - DRAGONFLY_TLS_CA_FILE, - DRAGONFLY_TLS_SKIP_VERIFY, - DRAGONFLY_ENABLE_KEEPALIVE, - DRAGONFLY_KEEPALIVE_INTERVAL, - DRAGONFLY_CLUSTER_MODE, - DRAGONFLY_CLUSTER_NODES, - DRAGONFLY_MAX_MEMORY, - DRAGONFLY_CACHE_MODE, -} = 
dragonflyConfig; +/** + * Dragonfly (Redis replacement) configuration using Yup + * High-performance caching and event streaming + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, port, num, bool } = envValidators; + +/** + * Dragonfly configuration with validation and defaults + */ +export const dragonflyConfig = cleanEnv(process.env, { + // Dragonfly Connection + DRAGONFLY_HOST: str('localhost', 'Dragonfly host'), + DRAGONFLY_PORT: port(6379, 'Dragonfly port'), + DRAGONFLY_PASSWORD: str('', 'Dragonfly password (if auth enabled)'), + DRAGONFLY_USERNAME: str('', 'Dragonfly username (if ACL enabled)'), + + // Database Selection + DRAGONFLY_DATABASE: num(0, 'Dragonfly database number (0-15)'), + + // Connection Pool Settings + DRAGONFLY_MAX_RETRIES: num(3, 'Maximum retry attempts'), + DRAGONFLY_RETRY_DELAY: num(50, 'Retry delay in ms'), + DRAGONFLY_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'), + DRAGONFLY_COMMAND_TIMEOUT: num(5000, 'Command timeout in ms'), + + // Pool Configuration + DRAGONFLY_POOL_SIZE: num(10, 'Connection pool size'), + DRAGONFLY_POOL_MIN: num(1, 'Minimum pool connections'), + DRAGONFLY_POOL_MAX: num(20, 'Maximum pool connections'), + + // TLS Settings + DRAGONFLY_TLS: bool(false, 'Enable TLS for Dragonfly connection'), + DRAGONFLY_TLS_CERT_FILE: str('', 'Path to TLS certificate file'), + DRAGONFLY_TLS_KEY_FILE: str('', 'Path to TLS key file'), + DRAGONFLY_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'), + DRAGONFLY_TLS_SKIP_VERIFY: bool(false, 'Skip TLS certificate verification'), + + // Performance Settings + DRAGONFLY_ENABLE_KEEPALIVE: bool(true, 'Enable TCP keepalive'), + DRAGONFLY_KEEPALIVE_INTERVAL: num(60, 'Keepalive interval in seconds'), + + // Clustering (if using cluster mode) + DRAGONFLY_CLUSTER_MODE: bool(false, 'Enable cluster mode'), + DRAGONFLY_CLUSTER_NODES: str('', 'Comma-separated list of cluster nodes (host:port)'), + + // Memory and Cache Settings + DRAGONFLY_MAX_MEMORY: str('2gb', 
'Maximum memory usage'), + DRAGONFLY_CACHE_MODE: bool(true, 'Enable cache mode'), +}); + +// Export typed configuration object +export type DragonflyConfig = typeof dragonflyConfig; + +// Export individual config values for convenience +export const { + DRAGONFLY_HOST, + DRAGONFLY_PORT, + DRAGONFLY_PASSWORD, + DRAGONFLY_USERNAME, + DRAGONFLY_DATABASE, + DRAGONFLY_MAX_RETRIES, + DRAGONFLY_RETRY_DELAY, + DRAGONFLY_CONNECT_TIMEOUT, + DRAGONFLY_COMMAND_TIMEOUT, + DRAGONFLY_POOL_SIZE, + DRAGONFLY_POOL_MIN, + DRAGONFLY_POOL_MAX, + DRAGONFLY_TLS, + DRAGONFLY_TLS_CERT_FILE, + DRAGONFLY_TLS_KEY_FILE, + DRAGONFLY_TLS_CA_FILE, + DRAGONFLY_TLS_SKIP_VERIFY, + DRAGONFLY_ENABLE_KEEPALIVE, + DRAGONFLY_KEEPALIVE_INTERVAL, + DRAGONFLY_CLUSTER_MODE, + DRAGONFLY_CLUSTER_NODES, + DRAGONFLY_MAX_MEMORY, + DRAGONFLY_CACHE_MODE, +} = dragonflyConfig; diff --git a/libs/config/src/env-utils.ts b/libs/config/src/env-utils.ts index 94e6b3e..6f037f8 100644 --- a/libs/config/src/env-utils.ts +++ b/libs/config/src/env-utils.ts @@ -1,162 +1,162 @@ -/** - * Environment validation utilities using Yup - */ -import * as yup from 'yup'; -import { config } from 'dotenv'; -import { join } from 'path'; -import { existsSync } from 'fs'; - -// Function to find and load environment variables -function loadEnvFiles() { - const cwd = process.cwd(); - const possiblePaths = [ - // Current working directory - join(cwd, '.env'), - join(cwd, '.env.local'), - // Root of the workspace (common pattern) - join(cwd, '../../.env'), - join(cwd, '../../../.env'), - // Config library directory - join(__dirname, '../.env'), - join(__dirname, '../../.env'), - join(__dirname, '../../../.env'), - ]; - - // Try to load each possible .env file - for (const envPath of possiblePaths) { - if (existsSync(envPath)) { - console.log(`πŸ“„ Loading environment from: ${envPath}`); - config({ path: envPath }); - break; // Use the first .env file found - } - } - - // Also try to load environment-specific files - const environment = 
process.env.NODE_ENV || 'development'; - const envSpecificPaths = [ - join(cwd, `.env.${environment}`), - join(cwd, `.env.${environment}.local`), - ]; - - for (const envPath of envSpecificPaths) { - if (existsSync(envPath)) { - console.log(`πŸ“„ Loading ${environment} environment from: ${envPath}`); - config({ path: envPath, override: false }); // Don't override existing vars - } - } -} - -// Load environment variables -loadEnvFiles(); - -/** - * Creates a Yup schema for environment variable validation - */ -export function createEnvSchema(shape: Record) { - return yup.object(shape); -} - -/** - * Validates environment variables against a Yup schema - */ -export function validateEnv( - schema: yup.ObjectSchema, - env = process.env -): any { - try { - const result = schema.validateSync(env, { abortEarly: false }); - return result; - } catch (error) { - if (error instanceof yup.ValidationError) { - console.error('❌ Invalid environment variables:'); - error.inner.forEach((err) => { - console.error(` ${err.path}: ${err.message}`); - }); - } - throw new Error('Environment validation failed'); - } -} - -/** - * Manually load environment variables from a specific path - */ -export function loadEnv(path?: string) { - if (path) { - console.log(`πŸ“„ Manually loading environment from: ${path}`); - config({ path }); - } else { - loadEnvFiles(); - } -} - -/** - * Helper functions for common validation patterns - */ -export const envValidators = { - // String with default - str: (defaultValue?: string, description?: string) => - yup.string().default(defaultValue || ''), - - // String with choices (enum) - strWithChoices: (choices: string[], defaultValue?: string, description?: string) => - yup.string().oneOf(choices).default(defaultValue || choices[0]), - - // Required string - requiredStr: (description?: string) => - yup.string().required('Required'), - - // Port number - port: (defaultValue?: number, description?: string) => - yup.number() - .integer() - .min(1) - .max(65535) 
- .transform((val, originalVal) => { - if (typeof originalVal === 'string') { - return parseInt(originalVal, 10); - } - return val; - }) - .default(defaultValue || 3000), - - // Number with default - num: (defaultValue?: number, description?: string) => - yup.number() - .transform((val, originalVal) => { - if (typeof originalVal === 'string') { - return parseFloat(originalVal); - } - return val; - }) - .default(defaultValue || 0), - - // Boolean with default - bool: (defaultValue?: boolean, description?: string) => - yup.boolean() - .transform((val, originalVal) => { - if (typeof originalVal === 'string') { - return originalVal === 'true' || originalVal === '1'; - } - return val; - }) - .default(defaultValue || false), - - // URL validation - url: (defaultValue?: string, description?: string) => - yup.string().url().default(defaultValue || 'http://localhost'), - - // Email validation - email: (description?: string) => - yup.string().email(), -}; - -/** - * Legacy compatibility - creates a cleanEnv-like function - */ -export function cleanEnv( - env: Record, - validators: Record -): any { - const schema = createEnvSchema(validators); - return validateEnv(schema, env); -} +/** + * Environment validation utilities using Yup + */ +import * as yup from 'yup'; +import { config } from 'dotenv'; +import { join } from 'path'; +import { existsSync } from 'fs'; + +// Function to find and load environment variables +function loadEnvFiles() { + const cwd = process.cwd(); + const possiblePaths = [ + // Current working directory + join(cwd, '.env'), + join(cwd, '.env.local'), + // Root of the workspace (common pattern) + join(cwd, '../../.env'), + join(cwd, '../../../.env'), + // Config library directory + join(__dirname, '../.env'), + join(__dirname, '../../.env'), + join(__dirname, '../../../.env'), + ]; + + // Try to load each possible .env file + for (const envPath of possiblePaths) { + if (existsSync(envPath)) { + console.log(`πŸ“„ Loading environment from: ${envPath}`); + 
config({ path: envPath }); + break; // Use the first .env file found + } + } + + // Also try to load environment-specific files + const environment = process.env.NODE_ENV || 'development'; + const envSpecificPaths = [ + join(cwd, `.env.${environment}`), + join(cwd, `.env.${environment}.local`), + ]; + + for (const envPath of envSpecificPaths) { + if (existsSync(envPath)) { + console.log(`πŸ“„ Loading ${environment} environment from: ${envPath}`); + config({ path: envPath, override: false }); // Don't override existing vars + } + } +} + +// Load environment variables +loadEnvFiles(); + +/** + * Creates a Yup schema for environment variable validation + */ +export function createEnvSchema(shape: Record) { + return yup.object(shape); +} + +/** + * Validates environment variables against a Yup schema + */ +export function validateEnv( + schema: yup.ObjectSchema, + env = process.env +): any { + try { + const result = schema.validateSync(env, { abortEarly: false }); + return result; + } catch (error) { + if (error instanceof yup.ValidationError) { + console.error('❌ Invalid environment variables:'); + error.inner.forEach((err) => { + console.error(` ${err.path}: ${err.message}`); + }); + } + throw new Error('Environment validation failed'); + } +} + +/** + * Manually load environment variables from a specific path + */ +export function loadEnv(path?: string) { + if (path) { + console.log(`πŸ“„ Manually loading environment from: ${path}`); + config({ path }); + } else { + loadEnvFiles(); + } +} + +/** + * Helper functions for common validation patterns + */ +export const envValidators = { + // String with default + str: (defaultValue?: string, description?: string) => + yup.string().default(defaultValue || ''), + + // String with choices (enum) + strWithChoices: (choices: string[], defaultValue?: string, description?: string) => + yup.string().oneOf(choices).default(defaultValue || choices[0]), + + // Required string + requiredStr: (description?: string) => + 
yup.string().required('Required'), + + // Port number + port: (defaultValue?: number, description?: string) => + yup.number() + .integer() + .min(1) + .max(65535) + .transform((val, originalVal) => { + if (typeof originalVal === 'string') { + return parseInt(originalVal, 10); + } + return val; + }) + .default(defaultValue || 3000), + + // Number with default + num: (defaultValue?: number, description?: string) => + yup.number() + .transform((val, originalVal) => { + if (typeof originalVal === 'string') { + return parseFloat(originalVal); + } + return val; + }) + .default(defaultValue || 0), + + // Boolean with default + bool: (defaultValue?: boolean, description?: string) => + yup.boolean() + .transform((val, originalVal) => { + if (typeof originalVal === 'string') { + return originalVal === 'true' || originalVal === '1'; + } + return val; + }) + .default(defaultValue || false), + + // URL validation + url: (defaultValue?: string, description?: string) => + yup.string().url().default(defaultValue || 'http://localhost'), + + // Email validation + email: (description?: string) => + yup.string().email(), +}; + +/** + * Legacy compatibility - creates a cleanEnv-like function + */ +export function cleanEnv( + env: Record, + validators: Record +): any { + const schema = createEnvSchema(validators); + return validateEnv(schema, env); +} diff --git a/libs/config/src/index.ts b/libs/config/src/index.ts index 710fe8c..6d58b00 100644 --- a/libs/config/src/index.ts +++ b/libs/config/src/index.ts @@ -1,20 +1,20 @@ -/** - * @stock-bot/config - * - * Configuration management library for Stock Bot platform using Yup - */ - -// Re-export everything from all modules -export * from './env-utils'; -export * from './core'; -export * from './admin-interfaces'; -export * from './database'; -export * from './dragonfly'; -export * from './postgres'; -export * from './questdb'; -export * from './mongodb'; -export * from './logging'; -export * from './loki'; -export * from './monitoring'; 
-export * from './data-providers'; -export * from './risk'; +/** + * @stock-bot/config + * + * Configuration management library for Stock Bot platform using Yup + */ + +// Re-export everything from all modules +export * from './env-utils'; +export * from './core'; +export * from './admin-interfaces'; +export * from './database'; +export * from './dragonfly'; +export * from './postgres'; +export * from './questdb'; +export * from './mongodb'; +export * from './logging'; +export * from './loki'; +export * from './monitoring'; +export * from './data-providers'; +export * from './risk'; diff --git a/libs/config/src/logging.ts b/libs/config/src/logging.ts index d411938..04c1c6b 100644 --- a/libs/config/src/logging.ts +++ b/libs/config/src/logging.ts @@ -1,74 +1,74 @@ -/** - * Logging configuration using Yup - * Application logging settings without Loki (Loki config is in monitoring.ts) - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, bool, num, strWithChoices } = envValidators; - -/** - * Logging configuration with validation and defaults - */ -export const loggingConfig = cleanEnv(process.env, { - // Basic Logging Settings - LOG_LEVEL: strWithChoices(['debug', 'info', 'warn', 'error'], 'info', 'Logging level'), - LOG_FORMAT: strWithChoices(['json', 'simple', 'combined'], 'json', 'Log output format'), - LOG_CONSOLE: bool(true, 'Enable console logging'), - LOG_FILE: bool(false, 'Enable file logging'), - - // File Logging Settings - LOG_FILE_PATH: str('logs', 'Log file directory path'), - LOG_FILE_MAX_SIZE: str('20m', 'Maximum log file size'), - LOG_FILE_MAX_FILES: num(14, 'Maximum number of log files to keep'), - LOG_FILE_DATE_PATTERN: str('YYYY-MM-DD', 'Log file date pattern'), - - // Error Logging - LOG_ERROR_FILE: bool(true, 'Enable separate error log file'), - LOG_ERROR_STACK: bool(true, 'Include stack traces in error logs'), - - // Performance Logging - LOG_PERFORMANCE: bool(false, 'Enable performance logging'), - LOG_SQL_QUERIES: 
bool(false, 'Log SQL queries'), - LOG_HTTP_REQUESTS: bool(true, 'Log HTTP requests'), - - // Structured Logging - LOG_STRUCTURED: bool(true, 'Use structured logging format'), - LOG_TIMESTAMP: bool(true, 'Include timestamps in logs'), - LOG_CALLER_INFO: bool(false, 'Include caller information in logs'), - // Log Filtering - LOG_SILENT_MODULES: str('', 'Comma-separated list of modules to silence'), - LOG_VERBOSE_MODULES: str('', 'Comma-separated list of modules for verbose logging'), - - // Application Context - LOG_SERVICE_NAME: str('stock-bot', 'Service name for log context'), - LOG_SERVICE_VERSION: str('1.0.0', 'Service version for log context'), - LOG_ENVIRONMENT: str('development', 'Environment for log context'), -}); - -// Export typed configuration object -export type LoggingConfig = typeof loggingConfig; - -// Export individual config values for convenience -export const { - LOG_LEVEL, - LOG_FORMAT, - LOG_CONSOLE, - LOG_FILE, - LOG_FILE_PATH, - LOG_FILE_MAX_SIZE, - LOG_FILE_MAX_FILES, - LOG_FILE_DATE_PATTERN, - LOG_ERROR_FILE, - LOG_ERROR_STACK, - LOG_PERFORMANCE, - LOG_SQL_QUERIES, - LOG_HTTP_REQUESTS, - LOG_STRUCTURED, - LOG_TIMESTAMP, - LOG_CALLER_INFO, - LOG_SILENT_MODULES, - LOG_VERBOSE_MODULES, - LOG_SERVICE_NAME, - LOG_SERVICE_VERSION, - LOG_ENVIRONMENT, -} = loggingConfig; +/** + * Logging configuration using Yup + * Application logging settings without Loki (Loki config is in monitoring.ts) + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, bool, num, strWithChoices } = envValidators; + +/** + * Logging configuration with validation and defaults + */ +export const loggingConfig = cleanEnv(process.env, { + // Basic Logging Settings + LOG_LEVEL: strWithChoices(['debug', 'info', 'warn', 'error'], 'info', 'Logging level'), + LOG_FORMAT: strWithChoices(['json', 'simple', 'combined'], 'json', 'Log output format'), + LOG_CONSOLE: bool(true, 'Enable console logging'), + LOG_FILE: bool(false, 'Enable file logging'), + + // File 
Logging Settings + LOG_FILE_PATH: str('logs', 'Log file directory path'), + LOG_FILE_MAX_SIZE: str('20m', 'Maximum log file size'), + LOG_FILE_MAX_FILES: num(14, 'Maximum number of log files to keep'), + LOG_FILE_DATE_PATTERN: str('YYYY-MM-DD', 'Log file date pattern'), + + // Error Logging + LOG_ERROR_FILE: bool(true, 'Enable separate error log file'), + LOG_ERROR_STACK: bool(true, 'Include stack traces in error logs'), + + // Performance Logging + LOG_PERFORMANCE: bool(false, 'Enable performance logging'), + LOG_SQL_QUERIES: bool(false, 'Log SQL queries'), + LOG_HTTP_REQUESTS: bool(true, 'Log HTTP requests'), + + // Structured Logging + LOG_STRUCTURED: bool(true, 'Use structured logging format'), + LOG_TIMESTAMP: bool(true, 'Include timestamps in logs'), + LOG_CALLER_INFO: bool(false, 'Include caller information in logs'), + // Log Filtering + LOG_SILENT_MODULES: str('', 'Comma-separated list of modules to silence'), + LOG_VERBOSE_MODULES: str('', 'Comma-separated list of modules for verbose logging'), + + // Application Context + LOG_SERVICE_NAME: str('stock-bot', 'Service name for log context'), + LOG_SERVICE_VERSION: str('1.0.0', 'Service version for log context'), + LOG_ENVIRONMENT: str('development', 'Environment for log context'), +}); + +// Export typed configuration object +export type LoggingConfig = typeof loggingConfig; + +// Export individual config values for convenience +export const { + LOG_LEVEL, + LOG_FORMAT, + LOG_CONSOLE, + LOG_FILE, + LOG_FILE_PATH, + LOG_FILE_MAX_SIZE, + LOG_FILE_MAX_FILES, + LOG_FILE_DATE_PATTERN, + LOG_ERROR_FILE, + LOG_ERROR_STACK, + LOG_PERFORMANCE, + LOG_SQL_QUERIES, + LOG_HTTP_REQUESTS, + LOG_STRUCTURED, + LOG_TIMESTAMP, + LOG_CALLER_INFO, + LOG_SILENT_MODULES, + LOG_VERBOSE_MODULES, + LOG_SERVICE_NAME, + LOG_SERVICE_VERSION, + LOG_ENVIRONMENT, +} = loggingConfig; diff --git a/libs/config/src/loki.ts b/libs/config/src/loki.ts index 357fbe0..4ed0fbb 100644 --- a/libs/config/src/loki.ts +++ b/libs/config/src/loki.ts @@ 
-1,63 +1,63 @@ -/** - * Loki log aggregation configuration using Yup - * Centralized logging configuration for the Stock Bot platform - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, port, bool, num } = envValidators; - -/** - * Loki configuration with validation and defaults - */ -export const lokiConfig = cleanEnv(process.env, { - // Loki Server - LOKI_HOST: str('localhost', 'Loki host'), - LOKI_PORT: port(3100, 'Loki port'), - LOKI_URL: str('', 'Complete Loki URL (overrides host/port)'), - - // Authentication - LOKI_USERNAME: str('', 'Loki username (if auth enabled)'), - LOKI_PASSWORD: str('', 'Loki password (if auth enabled)'), - LOKI_TENANT_ID: str('', 'Loki tenant ID (for multi-tenancy)'), - - // Push Configuration - LOKI_PUSH_TIMEOUT: num(10000, 'Push timeout in ms'), - LOKI_BATCH_SIZE: num(1024, 'Batch size for log entries'), - LOKI_BATCH_WAIT: num(5, 'Batch wait time in ms'), - - // Retention Settings - LOKI_RETENTION_PERIOD: str('30d', 'Log retention period'), - LOKI_MAX_CHUNK_AGE: str('1h', 'Maximum chunk age'), - - // TLS Settings - LOKI_TLS_ENABLED: bool(false, 'Enable TLS for Loki'), - LOKI_TLS_INSECURE: bool(false, 'Skip TLS verification'), - - // Log Labels - LOKI_DEFAULT_LABELS: str('', 'Default labels for all log entries (JSON format)'), - LOKI_SERVICE_LABEL: str('stock-bot', 'Service label for log entries'), - LOKI_ENVIRONMENT_LABEL: str('development', 'Environment label for log entries'), -}); - -// Export typed configuration object -export type LokiConfig = typeof lokiConfig; - -// Export individual config values for convenience -export const { - LOKI_HOST, - LOKI_PORT, - LOKI_URL, - LOKI_USERNAME, - LOKI_PASSWORD, - LOKI_TENANT_ID, - LOKI_PUSH_TIMEOUT, - LOKI_BATCH_SIZE, - LOKI_BATCH_WAIT, - LOKI_RETENTION_PERIOD, - LOKI_MAX_CHUNK_AGE, - LOKI_TLS_ENABLED, - LOKI_TLS_INSECURE, - LOKI_DEFAULT_LABELS, - LOKI_SERVICE_LABEL, - LOKI_ENVIRONMENT_LABEL, -} = lokiConfig; +/** + * Loki log aggregation configuration using Yup 
+ * Centralized logging configuration for the Stock Bot platform + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, port, bool, num } = envValidators; + +/** + * Loki configuration with validation and defaults + */ +export const lokiConfig = cleanEnv(process.env, { + // Loki Server + LOKI_HOST: str('localhost', 'Loki host'), + LOKI_PORT: port(3100, 'Loki port'), + LOKI_URL: str('', 'Complete Loki URL (overrides host/port)'), + + // Authentication + LOKI_USERNAME: str('', 'Loki username (if auth enabled)'), + LOKI_PASSWORD: str('', 'Loki password (if auth enabled)'), + LOKI_TENANT_ID: str('', 'Loki tenant ID (for multi-tenancy)'), + + // Push Configuration + LOKI_PUSH_TIMEOUT: num(10000, 'Push timeout in ms'), + LOKI_BATCH_SIZE: num(1024, 'Batch size for log entries'), + LOKI_BATCH_WAIT: num(5, 'Batch wait time in ms'), + + // Retention Settings + LOKI_RETENTION_PERIOD: str('30d', 'Log retention period'), + LOKI_MAX_CHUNK_AGE: str('1h', 'Maximum chunk age'), + + // TLS Settings + LOKI_TLS_ENABLED: bool(false, 'Enable TLS for Loki'), + LOKI_TLS_INSECURE: bool(false, 'Skip TLS verification'), + + // Log Labels + LOKI_DEFAULT_LABELS: str('', 'Default labels for all log entries (JSON format)'), + LOKI_SERVICE_LABEL: str('stock-bot', 'Service label for log entries'), + LOKI_ENVIRONMENT_LABEL: str('development', 'Environment label for log entries'), +}); + +// Export typed configuration object +export type LokiConfig = typeof lokiConfig; + +// Export individual config values for convenience +export const { + LOKI_HOST, + LOKI_PORT, + LOKI_URL, + LOKI_USERNAME, + LOKI_PASSWORD, + LOKI_TENANT_ID, + LOKI_PUSH_TIMEOUT, + LOKI_BATCH_SIZE, + LOKI_BATCH_WAIT, + LOKI_RETENTION_PERIOD, + LOKI_MAX_CHUNK_AGE, + LOKI_TLS_ENABLED, + LOKI_TLS_INSECURE, + LOKI_DEFAULT_LABELS, + LOKI_SERVICE_LABEL, + LOKI_ENVIRONMENT_LABEL, +} = lokiConfig; diff --git a/libs/config/src/mongodb.ts b/libs/config/src/mongodb.ts index 1c84a47..d1e0b4c 100644 --- 
a/libs/config/src/mongodb.ts +++ b/libs/config/src/mongodb.ts @@ -1,73 +1,73 @@ -/** - * MongoDB configuration using Yup - * Document storage for sentiment data, raw documents, and unstructured data - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, port, bool, num, strWithChoices } = envValidators; - -/** - * MongoDB configuration with validation and defaults - */ -export const mongodbConfig = cleanEnv(process.env, { - // MongoDB Connection - MONGODB_HOST: str('localhost', 'MongoDB host'), - MONGODB_PORT: port(27017, 'MongoDB port'), - MONGODB_DATABASE: str('trading_documents', 'MongoDB database name'), - - // Authentication - MONGODB_USERNAME: str('trading_admin', 'MongoDB username'), - MONGODB_PASSWORD: str('', 'MongoDB password'), - MONGODB_AUTH_SOURCE: str('admin', 'MongoDB authentication database'), - - // Connection URI (alternative to individual settings) - MONGODB_URI: str('', 'Complete MongoDB connection URI (overrides individual settings)'), - - // Connection Pool Settings - MONGODB_MAX_POOL_SIZE: num(10, 'Maximum connection pool size'), - MONGODB_MIN_POOL_SIZE: num(0, 'Minimum connection pool size'), - MONGODB_MAX_IDLE_TIME: num(30000, 'Maximum idle time for connections in ms'), - - // Timeouts - MONGODB_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'), - MONGODB_SOCKET_TIMEOUT: num(30000, 'Socket timeout in ms'), - MONGODB_SERVER_SELECTION_TIMEOUT: num(5000, 'Server selection timeout in ms'), - - // SSL/TLS Settings - MONGODB_TLS: bool(false, 'Enable TLS for MongoDB connection'), - MONGODB_TLS_INSECURE: bool(false, 'Allow invalid certificates in TLS mode'), - MONGODB_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'), - - // Additional Settings - MONGODB_RETRY_WRITES: bool(true, 'Enable retryable writes'), - MONGODB_JOURNAL: bool(true, 'Enable write concern journal'), - MONGODB_READ_PREFERENCE: strWithChoices(['primary', 'primaryPreferred', 'secondary', 'secondaryPreferred', 'nearest'], 'primary', 'MongoDB read 
preference'), - MONGODB_WRITE_CONCERN: str('majority', 'Write concern level'), -}); - -// Export typed configuration object -export type MongoDbConfig = typeof mongodbConfig; - -// Export individual config values for convenience -export const { - MONGODB_HOST, - MONGODB_PORT, - MONGODB_DATABASE, - MONGODB_USERNAME, - MONGODB_PASSWORD, - MONGODB_AUTH_SOURCE, - MONGODB_URI, - MONGODB_MAX_POOL_SIZE, - MONGODB_MIN_POOL_SIZE, - MONGODB_MAX_IDLE_TIME, - MONGODB_CONNECT_TIMEOUT, - MONGODB_SOCKET_TIMEOUT, - MONGODB_SERVER_SELECTION_TIMEOUT, - MONGODB_TLS, - MONGODB_TLS_INSECURE, - MONGODB_TLS_CA_FILE, - MONGODB_RETRY_WRITES, - MONGODB_JOURNAL, - MONGODB_READ_PREFERENCE, - MONGODB_WRITE_CONCERN, -} = mongodbConfig; +/** + * MongoDB configuration using Yup + * Document storage for sentiment data, raw documents, and unstructured data + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, port, bool, num, strWithChoices } = envValidators; + +/** + * MongoDB configuration with validation and defaults + */ +export const mongodbConfig = cleanEnv(process.env, { + // MongoDB Connection + MONGODB_HOST: str('localhost', 'MongoDB host'), + MONGODB_PORT: port(27017, 'MongoDB port'), + MONGODB_DATABASE: str('trading_documents', 'MongoDB database name'), + + // Authentication + MONGODB_USERNAME: str('trading_admin', 'MongoDB username'), + MONGODB_PASSWORD: str('', 'MongoDB password'), + MONGODB_AUTH_SOURCE: str('admin', 'MongoDB authentication database'), + + // Connection URI (alternative to individual settings) + MONGODB_URI: str('', 'Complete MongoDB connection URI (overrides individual settings)'), + + // Connection Pool Settings + MONGODB_MAX_POOL_SIZE: num(10, 'Maximum connection pool size'), + MONGODB_MIN_POOL_SIZE: num(0, 'Minimum connection pool size'), + MONGODB_MAX_IDLE_TIME: num(30000, 'Maximum idle time for connections in ms'), + + // Timeouts + MONGODB_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'), + MONGODB_SOCKET_TIMEOUT: num(30000, 'Socket 
timeout in ms'), + MONGODB_SERVER_SELECTION_TIMEOUT: num(5000, 'Server selection timeout in ms'), + + // SSL/TLS Settings + MONGODB_TLS: bool(false, 'Enable TLS for MongoDB connection'), + MONGODB_TLS_INSECURE: bool(false, 'Allow invalid certificates in TLS mode'), + MONGODB_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'), + + // Additional Settings + MONGODB_RETRY_WRITES: bool(true, 'Enable retryable writes'), + MONGODB_JOURNAL: bool(true, 'Enable write concern journal'), + MONGODB_READ_PREFERENCE: strWithChoices(['primary', 'primaryPreferred', 'secondary', 'secondaryPreferred', 'nearest'], 'primary', 'MongoDB read preference'), + MONGODB_WRITE_CONCERN: str('majority', 'Write concern level'), +}); + +// Export typed configuration object +export type MongoDbConfig = typeof mongodbConfig; + +// Export individual config values for convenience +export const { + MONGODB_HOST, + MONGODB_PORT, + MONGODB_DATABASE, + MONGODB_USERNAME, + MONGODB_PASSWORD, + MONGODB_AUTH_SOURCE, + MONGODB_URI, + MONGODB_MAX_POOL_SIZE, + MONGODB_MIN_POOL_SIZE, + MONGODB_MAX_IDLE_TIME, + MONGODB_CONNECT_TIMEOUT, + MONGODB_SOCKET_TIMEOUT, + MONGODB_SERVER_SELECTION_TIMEOUT, + MONGODB_TLS, + MONGODB_TLS_INSECURE, + MONGODB_TLS_CA_FILE, + MONGODB_RETRY_WRITES, + MONGODB_JOURNAL, + MONGODB_READ_PREFERENCE, + MONGODB_WRITE_CONCERN, +} = mongodbConfig; diff --git a/libs/config/src/monitoring.ts b/libs/config/src/monitoring.ts index d3ad510..8529efe 100644 --- a/libs/config/src/monitoring.ts +++ b/libs/config/src/monitoring.ts @@ -1,88 +1,88 @@ -/** - * Monitoring configuration using Yup - * Prometheus metrics, Grafana visualization, and Loki logging - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, port, bool, num, strWithChoices } = envValidators; - -/** - * Prometheus configuration with validation and defaults - */ -export const prometheusConfig = cleanEnv(process.env, { - // Prometheus Server - PROMETHEUS_HOST: str('localhost', 'Prometheus host'), - 
PROMETHEUS_PORT: port(9090, 'Prometheus port'), - PROMETHEUS_URL: str('', 'Complete Prometheus URL (overrides host/port)'), - - // Authentication - PROMETHEUS_USERNAME: str('', 'Prometheus username (if auth enabled)'), - PROMETHEUS_PASSWORD: str('', 'Prometheus password (if auth enabled)'), - - // Metrics Collection - PROMETHEUS_SCRAPE_INTERVAL: str('15s', 'Default scrape interval'), - PROMETHEUS_EVALUATION_INTERVAL: str('15s', 'Rule evaluation interval'), - PROMETHEUS_RETENTION_TIME: str('15d', 'Data retention time'), - - // TLS Settings - PROMETHEUS_TLS_ENABLED: bool(false, 'Enable TLS for Prometheus'), - PROMETHEUS_TLS_INSECURE: bool(false, 'Skip TLS verification'), -}); - -/** - * Grafana configuration with validation and defaults - */ -export const grafanaConfig = cleanEnv(process.env, { - // Grafana Server - GRAFANA_HOST: str('localhost', 'Grafana host'), - GRAFANA_PORT: port(3000, 'Grafana port'), - GRAFANA_URL: str('', 'Complete Grafana URL (overrides host/port)'), - - // Authentication - GRAFANA_ADMIN_USER: str('admin', 'Grafana admin username'), - GRAFANA_ADMIN_PASSWORD: str('admin', 'Grafana admin password'), - - // Security Settings - GRAFANA_ALLOW_SIGN_UP: bool(false, 'Allow user sign up'), - GRAFANA_SECRET_KEY: str('', 'Grafana secret key for encryption'), - - // Database Settings - GRAFANA_DATABASE_TYPE: strWithChoices(['mysql', 'postgres', 'sqlite3'], 'sqlite3', 'Grafana database type'), - GRAFANA_DATABASE_URL: str('', 'Grafana database URL'), - - // Feature Flags - GRAFANA_DISABLE_GRAVATAR: bool(true, 'Disable Gravatar avatars'), - GRAFANA_ENABLE_GZIP: bool(true, 'Enable gzip compression'), -}); - -// Export typed configuration objects -export type PrometheusConfig = typeof prometheusConfig; -export type GrafanaConfig = typeof grafanaConfig; - -// Export individual config values for convenience -export const { - PROMETHEUS_HOST, - PROMETHEUS_PORT, - PROMETHEUS_URL, - PROMETHEUS_USERNAME, - PROMETHEUS_PASSWORD, - PROMETHEUS_SCRAPE_INTERVAL, - 
PROMETHEUS_EVALUATION_INTERVAL, - PROMETHEUS_RETENTION_TIME, - PROMETHEUS_TLS_ENABLED, - PROMETHEUS_TLS_INSECURE, -} = prometheusConfig; - -export const { - GRAFANA_HOST, - GRAFANA_PORT, - GRAFANA_URL, - GRAFANA_ADMIN_USER, - GRAFANA_ADMIN_PASSWORD, - GRAFANA_ALLOW_SIGN_UP, - GRAFANA_SECRET_KEY, - GRAFANA_DATABASE_TYPE, - GRAFANA_DATABASE_URL, - GRAFANA_DISABLE_GRAVATAR, - GRAFANA_ENABLE_GZIP, -} = grafanaConfig; +/** + * Monitoring configuration using Yup + * Prometheus metrics, Grafana visualization, and Loki logging + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, port, bool, num, strWithChoices } = envValidators; + +/** + * Prometheus configuration with validation and defaults + */ +export const prometheusConfig = cleanEnv(process.env, { + // Prometheus Server + PROMETHEUS_HOST: str('localhost', 'Prometheus host'), + PROMETHEUS_PORT: port(9090, 'Prometheus port'), + PROMETHEUS_URL: str('', 'Complete Prometheus URL (overrides host/port)'), + + // Authentication + PROMETHEUS_USERNAME: str('', 'Prometheus username (if auth enabled)'), + PROMETHEUS_PASSWORD: str('', 'Prometheus password (if auth enabled)'), + + // Metrics Collection + PROMETHEUS_SCRAPE_INTERVAL: str('15s', 'Default scrape interval'), + PROMETHEUS_EVALUATION_INTERVAL: str('15s', 'Rule evaluation interval'), + PROMETHEUS_RETENTION_TIME: str('15d', 'Data retention time'), + + // TLS Settings + PROMETHEUS_TLS_ENABLED: bool(false, 'Enable TLS for Prometheus'), + PROMETHEUS_TLS_INSECURE: bool(false, 'Skip TLS verification'), +}); + +/** + * Grafana configuration with validation and defaults + */ +export const grafanaConfig = cleanEnv(process.env, { + // Grafana Server + GRAFANA_HOST: str('localhost', 'Grafana host'), + GRAFANA_PORT: port(3000, 'Grafana port'), + GRAFANA_URL: str('', 'Complete Grafana URL (overrides host/port)'), + + // Authentication + GRAFANA_ADMIN_USER: str('admin', 'Grafana admin username'), + GRAFANA_ADMIN_PASSWORD: str('admin', 'Grafana admin password'), 
+ + // Security Settings + GRAFANA_ALLOW_SIGN_UP: bool(false, 'Allow user sign up'), + GRAFANA_SECRET_KEY: str('', 'Grafana secret key for encryption'), + + // Database Settings + GRAFANA_DATABASE_TYPE: strWithChoices(['mysql', 'postgres', 'sqlite3'], 'sqlite3', 'Grafana database type'), + GRAFANA_DATABASE_URL: str('', 'Grafana database URL'), + + // Feature Flags + GRAFANA_DISABLE_GRAVATAR: bool(true, 'Disable Gravatar avatars'), + GRAFANA_ENABLE_GZIP: bool(true, 'Enable gzip compression'), +}); + +// Export typed configuration objects +export type PrometheusConfig = typeof prometheusConfig; +export type GrafanaConfig = typeof grafanaConfig; + +// Export individual config values for convenience +export const { + PROMETHEUS_HOST, + PROMETHEUS_PORT, + PROMETHEUS_URL, + PROMETHEUS_USERNAME, + PROMETHEUS_PASSWORD, + PROMETHEUS_SCRAPE_INTERVAL, + PROMETHEUS_EVALUATION_INTERVAL, + PROMETHEUS_RETENTION_TIME, + PROMETHEUS_TLS_ENABLED, + PROMETHEUS_TLS_INSECURE, +} = prometheusConfig; + +export const { + GRAFANA_HOST, + GRAFANA_PORT, + GRAFANA_URL, + GRAFANA_ADMIN_USER, + GRAFANA_ADMIN_PASSWORD, + GRAFANA_ALLOW_SIGN_UP, + GRAFANA_SECRET_KEY, + GRAFANA_DATABASE_TYPE, + GRAFANA_DATABASE_URL, + GRAFANA_DISABLE_GRAVATAR, + GRAFANA_ENABLE_GZIP, +} = grafanaConfig; diff --git a/libs/config/src/postgres.ts b/libs/config/src/postgres.ts index 02a056d..360b34b 100644 --- a/libs/config/src/postgres.ts +++ b/libs/config/src/postgres.ts @@ -1,56 +1,56 @@ -/** - * PostgreSQL configuration using Yup - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, port, bool, num } = envValidators; - -/** - * PostgreSQL configuration with validation and defaults - */ -export const postgresConfig = cleanEnv(process.env, { - // PostgreSQL Connection Settings - POSTGRES_HOST: str('localhost', 'PostgreSQL host'), - POSTGRES_PORT: port(5432, 'PostgreSQL port'), - POSTGRES_DATABASE: str('stockbot', 'PostgreSQL database name'), - POSTGRES_USERNAME: str('stockbot', 'PostgreSQL 
username'), - POSTGRES_PASSWORD: str('', 'PostgreSQL password'), - - // Connection Pool Settings - POSTGRES_POOL_MIN: num(2, 'Minimum pool connections'), - POSTGRES_POOL_MAX: num(10, 'Maximum pool connections'), - POSTGRES_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'), - - // SSL Configuration - POSTGRES_SSL: bool(false, 'Enable SSL for PostgreSQL connection'), - POSTGRES_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'), - - // Additional Settings - POSTGRES_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'), - POSTGRES_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'), - POSTGRES_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'), - POSTGRES_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'), - POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'), -}); - -// Export typed configuration object -export type PostgresConfig = typeof postgresConfig; - -// Export individual config values for convenience -export const { - POSTGRES_HOST, - POSTGRES_PORT, - POSTGRES_DATABASE, - POSTGRES_USERNAME, - POSTGRES_PASSWORD, - POSTGRES_POOL_MIN, - POSTGRES_POOL_MAX, - POSTGRES_POOL_IDLE_TIMEOUT, - POSTGRES_SSL, - POSTGRES_SSL_REJECT_UNAUTHORIZED, - POSTGRES_QUERY_TIMEOUT, - POSTGRES_CONNECTION_TIMEOUT, - POSTGRES_STATEMENT_TIMEOUT, - POSTGRES_LOCK_TIMEOUT, - POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT, -} = postgresConfig; +/** + * PostgreSQL configuration using Yup + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, port, bool, num } = envValidators; + +/** + * PostgreSQL configuration with validation and defaults + */ +export const postgresConfig = cleanEnv(process.env, { + // PostgreSQL Connection Settings + POSTGRES_HOST: str('localhost', 'PostgreSQL host'), + POSTGRES_PORT: port(5432, 'PostgreSQL port'), + POSTGRES_DATABASE: str('stockbot', 'PostgreSQL database name'), + POSTGRES_USERNAME: str('stockbot', 'PostgreSQL username'), + POSTGRES_PASSWORD: str('', 
'PostgreSQL password'), + + // Connection Pool Settings + POSTGRES_POOL_MIN: num(2, 'Minimum pool connections'), + POSTGRES_POOL_MAX: num(10, 'Maximum pool connections'), + POSTGRES_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'), + + // SSL Configuration + POSTGRES_SSL: bool(false, 'Enable SSL for PostgreSQL connection'), + POSTGRES_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'), + + // Additional Settings + POSTGRES_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'), + POSTGRES_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'), + POSTGRES_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'), + POSTGRES_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'), + POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'), +}); + +// Export typed configuration object +export type PostgresConfig = typeof postgresConfig; + +// Export individual config values for convenience +export const { + POSTGRES_HOST, + POSTGRES_PORT, + POSTGRES_DATABASE, + POSTGRES_USERNAME, + POSTGRES_PASSWORD, + POSTGRES_POOL_MIN, + POSTGRES_POOL_MAX, + POSTGRES_POOL_IDLE_TIMEOUT, + POSTGRES_SSL, + POSTGRES_SSL_REJECT_UNAUTHORIZED, + POSTGRES_QUERY_TIMEOUT, + POSTGRES_CONNECTION_TIMEOUT, + POSTGRES_STATEMENT_TIMEOUT, + POSTGRES_LOCK_TIMEOUT, + POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT, +} = postgresConfig; diff --git a/libs/config/src/questdb.ts b/libs/config/src/questdb.ts index a0ab933..47c094f 100644 --- a/libs/config/src/questdb.ts +++ b/libs/config/src/questdb.ts @@ -1,55 +1,55 @@ -/** - * QuestDB configuration using Yup - * Time-series database for OHLCV data, indicators, and performance metrics - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, port, bool, num } = envValidators; - -/** - * QuestDB configuration with validation and defaults - */ -export const questdbConfig = cleanEnv(process.env, { - // QuestDB Connection - QUESTDB_HOST: str('localhost', 'QuestDB host'), - 
QUESTDB_HTTP_PORT: port(9000, 'QuestDB HTTP port (web console)'), - QUESTDB_PG_PORT: port(8812, 'QuestDB PostgreSQL wire protocol port'), - QUESTDB_INFLUX_PORT: port(9009, 'QuestDB InfluxDB line protocol port'), - - // Authentication (if enabled) - QUESTDB_USER: str('', 'QuestDB username (if auth enabled)'), - QUESTDB_PASSWORD: str('', 'QuestDB password (if auth enabled)'), - - // Connection Settings - QUESTDB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'), - QUESTDB_REQUEST_TIMEOUT: num(30000, 'Request timeout in ms'), - QUESTDB_RETRY_ATTEMPTS: num(3, 'Number of retry attempts'), - - // TLS Settings - QUESTDB_TLS_ENABLED: bool(false, 'Enable TLS for QuestDB connection'), - QUESTDB_TLS_VERIFY_SERVER_CERT: bool(true, 'Verify server certificate'), - - // Database Settings - QUESTDB_DEFAULT_DATABASE: str('qdb', 'Default database name'), - QUESTDB_TELEMETRY_ENABLED: bool(false, 'Enable telemetry'), -}); - -// Export typed configuration object -export type QuestDbConfig = typeof questdbConfig; - -// Export individual config values for convenience -export const { - QUESTDB_HOST, - QUESTDB_HTTP_PORT, - QUESTDB_PG_PORT, - QUESTDB_INFLUX_PORT, - QUESTDB_USER, - QUESTDB_PASSWORD, - QUESTDB_CONNECTION_TIMEOUT, - QUESTDB_REQUEST_TIMEOUT, - QUESTDB_RETRY_ATTEMPTS, - QUESTDB_TLS_ENABLED, - QUESTDB_TLS_VERIFY_SERVER_CERT, - QUESTDB_DEFAULT_DATABASE, - QUESTDB_TELEMETRY_ENABLED, -} = questdbConfig; +/** + * QuestDB configuration using Yup + * Time-series database for OHLCV data, indicators, and performance metrics + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, port, bool, num } = envValidators; + +/** + * QuestDB configuration with validation and defaults + */ +export const questdbConfig = cleanEnv(process.env, { + // QuestDB Connection + QUESTDB_HOST: str('localhost', 'QuestDB host'), + QUESTDB_HTTP_PORT: port(9000, 'QuestDB HTTP port (web console)'), + QUESTDB_PG_PORT: port(8812, 'QuestDB PostgreSQL wire protocol port'), + 
QUESTDB_INFLUX_PORT: port(9009, 'QuestDB InfluxDB line protocol port'), + + // Authentication (if enabled) + QUESTDB_USER: str('', 'QuestDB username (if auth enabled)'), + QUESTDB_PASSWORD: str('', 'QuestDB password (if auth enabled)'), + + // Connection Settings + QUESTDB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'), + QUESTDB_REQUEST_TIMEOUT: num(30000, 'Request timeout in ms'), + QUESTDB_RETRY_ATTEMPTS: num(3, 'Number of retry attempts'), + + // TLS Settings + QUESTDB_TLS_ENABLED: bool(false, 'Enable TLS for QuestDB connection'), + QUESTDB_TLS_VERIFY_SERVER_CERT: bool(true, 'Verify server certificate'), + + // Database Settings + QUESTDB_DEFAULT_DATABASE: str('qdb', 'Default database name'), + QUESTDB_TELEMETRY_ENABLED: bool(false, 'Enable telemetry'), +}); + +// Export typed configuration object +export type QuestDbConfig = typeof questdbConfig; + +// Export individual config values for convenience +export const { + QUESTDB_HOST, + QUESTDB_HTTP_PORT, + QUESTDB_PG_PORT, + QUESTDB_INFLUX_PORT, + QUESTDB_USER, + QUESTDB_PASSWORD, + QUESTDB_CONNECTION_TIMEOUT, + QUESTDB_REQUEST_TIMEOUT, + QUESTDB_RETRY_ATTEMPTS, + QUESTDB_TLS_ENABLED, + QUESTDB_TLS_VERIFY_SERVER_CERT, + QUESTDB_DEFAULT_DATABASE, + QUESTDB_TELEMETRY_ENABLED, +} = questdbConfig; diff --git a/libs/config/src/risk.ts b/libs/config/src/risk.ts index ce2ad47..3c70a3f 100644 --- a/libs/config/src/risk.ts +++ b/libs/config/src/risk.ts @@ -1,80 +1,80 @@ -/** - * Risk management configuration using Yup - */ -import { cleanEnv, envValidators } from './env-utils'; - -const { str, num, bool, strWithChoices } = envValidators; - -/** - * Risk configuration with validation and defaults - */ -export const riskConfig = cleanEnv(process.env, { - // Position Sizing - RISK_MAX_POSITION_SIZE: num(0.1, 'Maximum position size as percentage of portfolio'), - RISK_MAX_PORTFOLIO_EXPOSURE: num(0.8, 'Maximum portfolio exposure percentage'), - RISK_MAX_SINGLE_ASSET_EXPOSURE: num(0.2, 'Maximum exposure to single 
asset'), - RISK_MAX_SECTOR_EXPOSURE: num(0.3, 'Maximum exposure to single sector'), - - // Stop Loss and Take Profit - RISK_DEFAULT_STOP_LOSS: num(0.05, 'Default stop loss percentage'), - RISK_DEFAULT_TAKE_PROFIT: num(0.15, 'Default take profit percentage'), - RISK_TRAILING_STOP_ENABLED: bool(true, 'Enable trailing stop losses'), - RISK_TRAILING_STOP_DISTANCE: num(0.03, 'Trailing stop distance percentage'), - - // Risk Limits - RISK_MAX_DAILY_LOSS: num(0.05, 'Maximum daily loss percentage'), - RISK_MAX_WEEKLY_LOSS: num(0.1, 'Maximum weekly loss percentage'), - RISK_MAX_MONTHLY_LOSS: num(0.2, 'Maximum monthly loss percentage'), - - // Volatility Controls - RISK_MAX_VOLATILITY_THRESHOLD: num(0.4, 'Maximum volatility threshold'), - RISK_VOLATILITY_LOOKBACK_DAYS: num(20, 'Volatility calculation lookback period'), - - // Correlation Controls - RISK_MAX_CORRELATION_THRESHOLD: num(0.7, 'Maximum correlation between positions'), - RISK_CORRELATION_LOOKBACK_DAYS: num(60, 'Correlation calculation lookback period'), - - // Leverage Controls - RISK_MAX_LEVERAGE: num(2.0, 'Maximum leverage allowed'), - RISK_MARGIN_CALL_THRESHOLD: num(0.3, 'Margin call threshold'), - - // Circuit Breakers - RISK_CIRCUIT_BREAKER_ENABLED: bool(true, 'Enable circuit breakers'), - RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD: num(0.1, 'Circuit breaker loss threshold'), - RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES: num(60, 'Circuit breaker cooldown period'), - - // Risk Model - RISK_MODEL_TYPE: strWithChoices(['var', 'cvar', 'expected_shortfall'], 'var', 'Risk model type'), - RISK_CONFIDENCE_LEVEL: num(0.95, 'Risk model confidence level'), - RISK_TIME_HORIZON_DAYS: num(1, 'Risk time horizon in days'), -}); - -// Export typed configuration object -export type RiskConfig = typeof riskConfig; - -// Export individual config values for convenience -export const { - RISK_MAX_POSITION_SIZE, - RISK_MAX_PORTFOLIO_EXPOSURE, - RISK_MAX_SINGLE_ASSET_EXPOSURE, - RISK_MAX_SECTOR_EXPOSURE, - RISK_DEFAULT_STOP_LOSS, - 
RISK_DEFAULT_TAKE_PROFIT, - RISK_TRAILING_STOP_ENABLED, - RISK_TRAILING_STOP_DISTANCE, - RISK_MAX_DAILY_LOSS, - RISK_MAX_WEEKLY_LOSS, - RISK_MAX_MONTHLY_LOSS, - RISK_MAX_VOLATILITY_THRESHOLD, - RISK_VOLATILITY_LOOKBACK_DAYS, - RISK_MAX_CORRELATION_THRESHOLD, - RISK_CORRELATION_LOOKBACK_DAYS, - RISK_MAX_LEVERAGE, - RISK_MARGIN_CALL_THRESHOLD, - RISK_CIRCUIT_BREAKER_ENABLED, - RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD, - RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES, - RISK_MODEL_TYPE, - RISK_CONFIDENCE_LEVEL, - RISK_TIME_HORIZON_DAYS, -} = riskConfig; +/** + * Risk management configuration using Yup + */ +import { cleanEnv, envValidators } from './env-utils'; + +const { str, num, bool, strWithChoices } = envValidators; + +/** + * Risk configuration with validation and defaults + */ +export const riskConfig = cleanEnv(process.env, { + // Position Sizing + RISK_MAX_POSITION_SIZE: num(0.1, 'Maximum position size as percentage of portfolio'), + RISK_MAX_PORTFOLIO_EXPOSURE: num(0.8, 'Maximum portfolio exposure percentage'), + RISK_MAX_SINGLE_ASSET_EXPOSURE: num(0.2, 'Maximum exposure to single asset'), + RISK_MAX_SECTOR_EXPOSURE: num(0.3, 'Maximum exposure to single sector'), + + // Stop Loss and Take Profit + RISK_DEFAULT_STOP_LOSS: num(0.05, 'Default stop loss percentage'), + RISK_DEFAULT_TAKE_PROFIT: num(0.15, 'Default take profit percentage'), + RISK_TRAILING_STOP_ENABLED: bool(true, 'Enable trailing stop losses'), + RISK_TRAILING_STOP_DISTANCE: num(0.03, 'Trailing stop distance percentage'), + + // Risk Limits + RISK_MAX_DAILY_LOSS: num(0.05, 'Maximum daily loss percentage'), + RISK_MAX_WEEKLY_LOSS: num(0.1, 'Maximum weekly loss percentage'), + RISK_MAX_MONTHLY_LOSS: num(0.2, 'Maximum monthly loss percentage'), + + // Volatility Controls + RISK_MAX_VOLATILITY_THRESHOLD: num(0.4, 'Maximum volatility threshold'), + RISK_VOLATILITY_LOOKBACK_DAYS: num(20, 'Volatility calculation lookback period'), + + // Correlation Controls + RISK_MAX_CORRELATION_THRESHOLD: num(0.7, 'Maximum 
correlation between positions'), + RISK_CORRELATION_LOOKBACK_DAYS: num(60, 'Correlation calculation lookback period'), + + // Leverage Controls + RISK_MAX_LEVERAGE: num(2.0, 'Maximum leverage allowed'), + RISK_MARGIN_CALL_THRESHOLD: num(0.3, 'Margin call threshold'), + + // Circuit Breakers + RISK_CIRCUIT_BREAKER_ENABLED: bool(true, 'Enable circuit breakers'), + RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD: num(0.1, 'Circuit breaker loss threshold'), + RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES: num(60, 'Circuit breaker cooldown period'), + + // Risk Model + RISK_MODEL_TYPE: strWithChoices(['var', 'cvar', 'expected_shortfall'], 'var', 'Risk model type'), + RISK_CONFIDENCE_LEVEL: num(0.95, 'Risk model confidence level'), + RISK_TIME_HORIZON_DAYS: num(1, 'Risk time horizon in days'), +}); + +// Export typed configuration object +export type RiskConfig = typeof riskConfig; + +// Export individual config values for convenience +export const { + RISK_MAX_POSITION_SIZE, + RISK_MAX_PORTFOLIO_EXPOSURE, + RISK_MAX_SINGLE_ASSET_EXPOSURE, + RISK_MAX_SECTOR_EXPOSURE, + RISK_DEFAULT_STOP_LOSS, + RISK_DEFAULT_TAKE_PROFIT, + RISK_TRAILING_STOP_ENABLED, + RISK_TRAILING_STOP_DISTANCE, + RISK_MAX_DAILY_LOSS, + RISK_MAX_WEEKLY_LOSS, + RISK_MAX_MONTHLY_LOSS, + RISK_MAX_VOLATILITY_THRESHOLD, + RISK_VOLATILITY_LOOKBACK_DAYS, + RISK_MAX_CORRELATION_THRESHOLD, + RISK_CORRELATION_LOOKBACK_DAYS, + RISK_MAX_LEVERAGE, + RISK_MARGIN_CALL_THRESHOLD, + RISK_CIRCUIT_BREAKER_ENABLED, + RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD, + RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES, + RISK_MODEL_TYPE, + RISK_CONFIDENCE_LEVEL, + RISK_TIME_HORIZON_DAYS, +} = riskConfig; diff --git a/libs/config/test-config.mjs b/libs/config/test-config.mjs index 08276a8..df8ac0f 100644 --- a/libs/config/test-config.mjs +++ b/libs/config/test-config.mjs @@ -1,85 +1,85 @@ -import { - databaseConfig, - questdbConfig, - mongodbConfig, - dragonflyConfig, - prometheusConfig, - grafanaConfig, - lokiConfig, - loggingConfig -} from './dist/index'; - -// Set 
test environment variables -process.env.NODE_ENV = 'test'; -process.env.PORT = '3001'; - -// Database configs -process.env.DB_HOST = 'localhost'; -process.env.DB_PORT = '5432'; -process.env.DB_NAME = 'test_db'; -process.env.DB_USER = 'test_user'; -process.env.DB_PASSWORD = 'test_pass'; - -// QuestDB configs -process.env.QUESTDB_HOST = 'localhost'; -process.env.QUESTDB_HTTP_PORT = '9000'; -process.env.QUESTDB_PG_PORT = '8812'; - -// MongoDB configs -process.env.MONGODB_HOST = 'localhost'; -process.env.MONGODB_PORT = '27017'; -process.env.MONGODB_DATABASE = 'test_db'; - -// Dragonfly configs -process.env.DRAGONFLY_HOST = 'localhost'; -process.env.DRAGONFLY_PORT = '6379'; - -// Monitoring configs -process.env.PROMETHEUS_HOST = 'localhost'; -process.env.PROMETHEUS_PORT = '9090'; -process.env.GRAFANA_HOST = 'localhost'; -process.env.GRAFANA_PORT = '3000'; - -// Loki configs -process.env.LOKI_HOST = 'localhost'; -process.env.LOKI_PORT = '3100'; - -// Logging configs -process.env.LOG_LEVEL = 'info'; -process.env.LOG_FORMAT = 'json'; - -console.log('πŸ” Testing configuration modules...\n'); - -const configs = [ - { name: 'Database', config: databaseConfig }, - { name: 'QuestDB', config: questdbConfig }, - { name: 'MongoDB', config: mongodbConfig }, - { name: 'Dragonfly', config: dragonflyConfig }, - { name: 'Prometheus', config: prometheusConfig }, - { name: 'Grafana', config: grafanaConfig }, - { name: 'Loki', config: lokiConfig }, - { name: 'Logging', config: loggingConfig }, -]; - -let successful = 0; - -for (const { name, config } of configs) { - try { - if (config && typeof config === 'object' && Object.keys(config).length > 0) { - console.log(`βœ… ${name}: Loaded successfully`); - successful++; - } else { - console.log(`❌ ${name}: Invalid config object`); - } - } catch (error) { - console.log(`❌ ${name}: ${error.message}`); - } -} - -console.log(`\nπŸ“Š Test Summary: ${successful}/${configs.length} modules loaded successfully`); - -if (successful === configs.length) 
{ - console.log('πŸŽ‰ All configuration modules working correctly!'); -} else { - console.log('⚠️ Some configuration modules have issues.'); -} +import { + databaseConfig, + questdbConfig, + mongodbConfig, + dragonflyConfig, + prometheusConfig, + grafanaConfig, + lokiConfig, + loggingConfig +} from './dist/index'; + +// Set test environment variables +process.env.NODE_ENV = 'test'; +process.env.PORT = '3001'; + +// Database configs +process.env.DB_HOST = 'localhost'; +process.env.DB_PORT = '5432'; +process.env.DB_NAME = 'test_db'; +process.env.DB_USER = 'test_user'; +process.env.DB_PASSWORD = 'test_pass'; + +// QuestDB configs +process.env.QUESTDB_HOST = 'localhost'; +process.env.QUESTDB_HTTP_PORT = '9000'; +process.env.QUESTDB_PG_PORT = '8812'; + +// MongoDB configs +process.env.MONGODB_HOST = 'localhost'; +process.env.MONGODB_PORT = '27017'; +process.env.MONGODB_DATABASE = 'test_db'; + +// Dragonfly configs +process.env.DRAGONFLY_HOST = 'localhost'; +process.env.DRAGONFLY_PORT = '6379'; + +// Monitoring configs +process.env.PROMETHEUS_HOST = 'localhost'; +process.env.PROMETHEUS_PORT = '9090'; +process.env.GRAFANA_HOST = 'localhost'; +process.env.GRAFANA_PORT = '3000'; + +// Loki configs +process.env.LOKI_HOST = 'localhost'; +process.env.LOKI_PORT = '3100'; + +// Logging configs +process.env.LOG_LEVEL = 'info'; +process.env.LOG_FORMAT = 'json'; + +console.log('πŸ” Testing configuration modules...\n'); + +const configs = [ + { name: 'Database', config: databaseConfig }, + { name: 'QuestDB', config: questdbConfig }, + { name: 'MongoDB', config: mongodbConfig }, + { name: 'Dragonfly', config: dragonflyConfig }, + { name: 'Prometheus', config: prometheusConfig }, + { name: 'Grafana', config: grafanaConfig }, + { name: 'Loki', config: lokiConfig }, + { name: 'Logging', config: loggingConfig }, +]; + +let successful = 0; + +for (const { name, config } of configs) { + try { + if (config && typeof config === 'object' && Object.keys(config).length > 0) { + 
console.log(`βœ… ${name}: Loaded successfully`); + successful++; + } else { + console.log(`❌ ${name}: Invalid config object`); + } + } catch (error) { + console.log(`❌ ${name}: ${error.message}`); + } +} + +console.log(`\nπŸ“Š Test Summary: ${successful}/${configs.length} modules loaded successfully`); + +if (successful === configs.length) { + console.log('πŸŽ‰ All configuration modules working correctly!'); +} else { + console.log('⚠️ Some configuration modules have issues.'); +} diff --git a/libs/config/test/integration.test.ts b/libs/config/test/integration.test.ts index 24f413d..074d7bb 100644 --- a/libs/config/test/integration.test.ts +++ b/libs/config/test/integration.test.ts @@ -1,433 +1,433 @@ -/** - * Integration Tests for Config Library - * - * Tests the entire configuration system including module interactions, - * environment loading, validation across modules, and type exports. - */ - -import { describe, test, expect, beforeEach } from 'bun:test'; -import { setTestEnv, clearEnvVars, getMinimalTestEnv } from '../test/setup'; - -describe('Config Library Integration', () => { - beforeEach(() => { - // Clear module cache for clean state - // Note: Bun handles module caching differently than Jest - }); - - describe('Complete Configuration Loading', () => { test('should load all configuration modules successfully', async () => { - setTestEnv(getMinimalTestEnv()); - // Import all modules - const [ - { Environment, getEnvironment }, - { postgresConfig }, - { questdbConfig }, - { mongodbConfig }, - { loggingConfig }, - { riskConfig } - ] = await Promise.all([ - import('../src/core'), - import('../src/postgres'), - import('../src/questdb'), - import('../src/mongodb'), - import('../src/logging'), - import('../src/risk') - ]); - - // Verify all configs are loaded - expect(Environment).toBeDefined(); - expect(getEnvironment).toBeDefined(); - expect(postgresConfig).toBeDefined(); - expect(questdbConfig).toBeDefined(); - expect(mongodbConfig).toBeDefined(); - 
expect(loggingConfig).toBeDefined(); - expect(riskConfig).toBeDefined(); - // Verify core utilities - expect(getEnvironment()).toBe(Environment.Testing); // Should be Testing due to NODE_ENV=test in setup - expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); expect(questdbConfig.QUESTDB_HOST).toBe('localhost'); - expect(mongodbConfig.MONGODB_HOST).toBe('localhost'); // fix: use correct property - expect(loggingConfig.LOG_LEVEL).toBeDefined(); - expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); - }); test('should handle missing required environment variables gracefully', async () => { - setTestEnv({ - NODE_ENV: 'test' - // Missing required variables - }); - - // Should be able to load core utilities - const { Environment, getEnvironment } = await import('../src/core'); - expect(Environment).toBeDefined(); - expect(getEnvironment()).toBe(Environment.Testing); - // Should fail to load modules requiring specific vars (if they have required vars) - // Note: Most modules have defaults, so they might not throw - try { - const { postgresConfig } = await import('../src/postgres'); - expect(postgresConfig).toBeDefined(); - expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value - } catch (error) { - // If it throws, that's also acceptable behavior - expect(error).toBeDefined(); - } - }); test('should maintain consistency across environment detection', async () => { - setTestEnv({ - NODE_ENV: 'production', - ...getMinimalTestEnv() - }); - const [ - { Environment, getEnvironment }, - { postgresConfig }, - { questdbConfig }, - { mongodbConfig }, - { loggingConfig } - ] = await Promise.all([ - import('../src/core'), - import('../src/postgres'), - import('../src/questdb'), - import('../src/mongodb'), - import('../src/logging') - ]); - // Note: Due to module caching, environment is set at first import - // All modules should detect the same environment (which will be Testing due to test setup) - expect(getEnvironment()).toBe(Environment.Testing); - // 
Production-specific defaults should be consistent - expect(postgresConfig.POSTGRES_SSL).toBe(false); // default is false unless overridden expect(questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // checking actual property name - expect(mongodbConfig.MONGODB_TLS).toBe(false); // checking actual property name - expect(loggingConfig.LOG_FORMAT).toBe('json'); - }); - }); - - describe('Main Index Exports', () => { test('should export all configuration objects from index', async () => { - setTestEnv(getMinimalTestEnv()); - - const config = await import('../src/index'); - - // Core utilities (no coreConfig object) - expect(config.Environment).toBeDefined(); - expect(config.getEnvironment).toBeDefined(); - expect(config.ConfigurationError).toBeDefined(); - - // Configuration objects - expect(config.postgresConfig).toBeDefined(); - expect(config.questdbConfig).toBeDefined(); - expect(config.mongodbConfig).toBeDefined(); - expect(config.loggingConfig).toBeDefined(); - expect(config.riskConfig).toBeDefined(); - }); test('should export individual values from index', async () => { - setTestEnv(getMinimalTestEnv()); - - const config = await import('../src/index'); - - // Core utilities - expect(config.Environment).toBeDefined(); - expect(config.getEnvironment).toBeDefined(); - - // Individual configuration values exported from modules - expect(config.POSTGRES_HOST).toBeDefined(); - expect(config.POSTGRES_PORT).toBeDefined(); - expect(config.QUESTDB_HOST).toBeDefined(); - expect(config.MONGODB_HOST).toBeDefined(); - - // Risk values - expect(config.RISK_MAX_POSITION_SIZE).toBeDefined(); - expect(config.RISK_MAX_DAILY_LOSS).toBeDefined(); - - // Logging values - expect(config.LOG_LEVEL).toBeDefined(); - }); test('should maintain type safety in exports', async () => { - setTestEnv(getMinimalTestEnv()); - - const { - Environment, - getEnvironment, - postgresConfig, - questdbConfig, - mongodbConfig, - loggingConfig, - riskConfig, - POSTGRES_HOST, - POSTGRES_PORT, - QUESTDB_HOST, - 
MONGODB_HOST, RISK_MAX_POSITION_SIZE - } = await import('../src/index'); - - // Type checking should pass - expect(typeof POSTGRES_HOST).toBe('string'); - expect(typeof POSTGRES_PORT).toBe('number'); - expect(typeof QUESTDB_HOST).toBe('string'); - expect(typeof MONGODB_HOST).toBe('string'); - expect(typeof RISK_MAX_POSITION_SIZE).toBe('number'); - - // Configuration objects should have expected shapes - expect(postgresConfig).toHaveProperty('POSTGRES_HOST'); - expect(postgresConfig).toHaveProperty('POSTGRES_PORT'); - expect(questdbConfig).toHaveProperty('QUESTDB_HOST'); - expect(mongodbConfig).toHaveProperty('MONGODB_HOST'); - expect(loggingConfig).toHaveProperty('LOG_LEVEL'); - expect(riskConfig).toHaveProperty('RISK_MAX_POSITION_SIZE'); - }); - }); - describe('Environment Variable Validation', () => { - test('should validate environment variables across all modules', async () => { - setTestEnv({ - NODE_ENV: 'test', - LOG_LEVEL: 'info', // valid level - POSTGRES_HOST: 'localhost', - POSTGRES_DATABASE: 'test', - POSTGRES_USERNAME: 'test', - POSTGRES_PASSWORD: 'test', - QUESTDB_HOST: 'localhost', - MONGODB_HOST: 'localhost', - MONGODB_DATABASE: 'test', - RISK_MAX_POSITION_SIZE: '0.1', - RISK_MAX_DAILY_LOSS: '0.05' - }); // All imports should succeed with valid config - const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([ - import('../src/core'), - import('../src/postgres'), - import('../src/questdb'), - import('../src/mongodb'), - import('../src/logging'), - import('../src/risk') - ]); - - expect(core.getEnvironment()).toBe(core.Environment.Testing); // default test env - expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost'); - expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost'); - expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost'); - expect(logging.loggingConfig.LOG_LEVEL).toBe('info'); // set in test - expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // from test env - }); test('should accept valid 
environment variables across all modules', async () => { - setTestEnv({ - NODE_ENV: 'development', - LOG_LEVEL: 'debug', - - POSTGRES_HOST: 'localhost', - POSTGRES_PORT: '5432', - POSTGRES_DATABASE: 'stockbot_dev', - POSTGRES_USERNAME: 'dev_user', - POSTGRES_PASSWORD: 'dev_pass', - POSTGRES_SSL: 'false', - - QUESTDB_HOST: 'localhost', - QUESTDB_HTTP_PORT: '9000', - QUESTDB_PG_PORT: '8812', - - MONGODB_HOST: 'localhost', - MONGODB_DATABASE: 'stockbot_dev', - - RISK_MAX_POSITION_SIZE: '0.25', - RISK_MAX_DAILY_LOSS: '0.025', - - LOG_FORMAT: 'json', - LOG_FILE_ENABLED: 'false' - }); - - // All imports should succeed - const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([ - import('../src/core'), - import('../src/postgres'), - import('../src/questdb'), - import('../src/mongodb'), - import('../src/logging'), - import('../src/risk') - ]); - - // Since this is the first test to set NODE_ENV to development and modules might not be cached yet, - // this could actually change the environment. Let's test what we actually get. 
- expect(core.getEnvironment()).toBeDefined(); // Just verify it returns something valid - expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost'); - expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost'); - expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost'); - expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default value - expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value - }); - }); - - describe('Configuration Consistency', () => { test('should maintain consistent SSL settings across databases', async () => { - setTestEnv({ - NODE_ENV: 'production', - POSTGRES_HOST: 'prod-postgres.com', - POSTGRES_DATABASE: 'prod_db', - POSTGRES_USERNAME: 'prod_user', - POSTGRES_PASSWORD: 'prod_pass', - QUESTDB_HOST: 'prod-questdb.com', - MONGODB_HOST: 'prod-mongo.com', - MONGODB_DATABASE: 'prod_db', - RISK_MAX_POSITION_SIZE: '0.1', - RISK_MAX_DAILY_LOSS: '0.05' - // SSL settings not explicitly set - should use defaults - }); - - const [postgres, questdb, mongodb] = await Promise.all([ - import('../src/postgres'), - import('../src/questdb'), - import('../src/mongodb') - ]); - - // Check actual SSL property names and their default values expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default is false - expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // default is false - expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false); // default is false - }); test('should maintain consistent environment detection across modules', async () => { - setTestEnv({ - NODE_ENV: 'staging', - ...getMinimalTestEnv() - }); - - const [core, logging] = await Promise.all([ - import('../src/core'), - import('../src/logging') - ]); - expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists - - // The setTestEnv call above doesn't actually change the real NODE_ENV because modules cache it - // So we check that the test setup is working correctly - 
expect(process.env.NODE_ENV).toBe('test'); // This is what's actually set in test environment - }); - }); - - describe('Performance and Caching', () => { test('should cache configuration values between imports', async () => { - setTestEnv(getMinimalTestEnv()); - - // Import the same module multiple times - const postgres1 = await import('../src/postgres'); - const postgres2 = await import('../src/postgres'); - const postgres3 = await import('../src/postgres'); - - // Should return the same object reference (cached) - expect(postgres1.postgresConfig).toBe(postgres2.postgresConfig); - expect(postgres2.postgresConfig).toBe(postgres3.postgresConfig); - }); - - test('should handle rapid sequential imports', async () => { - setTestEnv(getMinimalTestEnv()); - - // Import all modules simultaneously - const startTime = Date.now(); - - await Promise.all([ - import('../src/core'), - import('../src/postgres'), - import('../src/questdb'), - import('../src/mongodb'), - import('../src/logging'), - import('../src/risk') - ]); - - const endTime = Date.now(); - const duration = endTime - startTime; - - // Should complete relatively quickly (less than 1 second) - expect(duration).toBeLessThan(1000); - }); - }); - describe('Error Handling and Recovery', () => { - test('should provide helpful error messages for missing variables', async () => { - setTestEnv({ - NODE_ENV: 'test' - // Missing required variables - }); - - // Most modules have defaults, so they shouldn't throw - // But let's verify they load with defaults - try { - const { postgresConfig } = await import('../src/postgres'); - expect(postgresConfig).toBeDefined(); - expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value - } catch (error) { - // If it throws, check that error message is helpful - expect((error as Error).message).toBeTruthy(); - } - - try { - const { riskConfig } = await import('../src/risk'); - expect(riskConfig).toBeDefined(); - expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // 
default value - } catch (error) { - // If it throws, check that error message is helpful - expect((error as Error).message).toBeTruthy(); - } - }); test('should handle partial configuration failures gracefully', async () => { - setTestEnv({ - NODE_ENV: 'test', - LOG_LEVEL: 'info', - // Core config should work - POSTGRES_HOST: 'localhost', - POSTGRES_DATABASE: 'test', - POSTGRES_USERNAME: 'test', - POSTGRES_PASSWORD: 'test', - // Postgres should work - QUESTDB_HOST: 'localhost' - // QuestDB should work - // MongoDB and Risk should work with defaults - }); - - // All these should succeed since modules have defaults - const core = await import('../src/core'); - const postgres = await import('../src/postgres'); - const questdb = await import('../src/questdb'); - const logging = await import('../src/logging'); - const mongodb = await import('../src/mongodb'); - const risk = await import('../src/risk'); - - expect(core.Environment).toBeDefined(); - expect(postgres.postgresConfig).toBeDefined(); - expect(questdb.questdbConfig).toBeDefined(); - expect(logging.loggingConfig).toBeDefined(); - expect(mongodb.mongodbConfig).toBeDefined(); - expect(risk.riskConfig).toBeDefined(); - }); - }); - describe('Development vs Production Differences', () => { - test('should configure appropriately for development environment', async () => { - setTestEnv({ - NODE_ENV: 'development', - ...getMinimalTestEnv(), - POSTGRES_SSL: undefined, // Should default to false - QUESTDB_TLS_ENABLED: undefined, // Should default to false - MONGODB_TLS: undefined, // Should default to false - LOG_FORMAT: undefined, // Should default to json - RISK_CIRCUIT_BREAKER_ENABLED: undefined // Should default to true - }); - - const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([ - import('../src/core'), - import('../src/postgres'), - import('../src/questdb'), - import('../src/mongodb'), - import('../src/logging'), - import('../src/risk') - ]); - 
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists - expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); - expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false); - expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default - expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true); // default - }); - - test('should configure appropriately for production environment', async () => { - setTestEnv({ - NODE_ENV: 'production', - ...getMinimalTestEnv(), - POSTGRES_SSL: undefined, // Should default to false (same as dev) - QUESTDB_TLS_ENABLED: undefined, // Should default to false - MONGODB_TLS: undefined, // Should default to false - LOG_FORMAT: undefined, // Should default to json - RISK_CIRCUIT_BREAKER_ENABLED: undefined // Should default to true - }); - - const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([ - import('../src/core'), - import('../src/postgres'), - import('../src/questdb'), - import('../src/mongodb'), - import('../src/logging'), - import('../src/risk') ]); - - expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists - expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default doesn't change by env - expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); - expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false); - expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); - expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true); - }); - }); -}); +/** + * Integration Tests for Config Library + * + * Tests the entire configuration system including module interactions, + * environment loading, validation across modules, and type exports. 
+ */ + +import { describe, test, expect, beforeEach } from 'bun:test'; +import { setTestEnv, clearEnvVars, getMinimalTestEnv } from '../test/setup'; + +describe('Config Library Integration', () => { + beforeEach(() => { + // Clear module cache for clean state + // Note: Bun handles module caching differently than Jest + }); + + describe('Complete Configuration Loading', () => { test('should load all configuration modules successfully', async () => { + setTestEnv(getMinimalTestEnv()); + // Import all modules + const [ + { Environment, getEnvironment }, + { postgresConfig }, + { questdbConfig }, + { mongodbConfig }, + { loggingConfig }, + { riskConfig } + ] = await Promise.all([ + import('../src/core'), + import('../src/postgres'), + import('../src/questdb'), + import('../src/mongodb'), + import('../src/logging'), + import('../src/risk') + ]); + + // Verify all configs are loaded + expect(Environment).toBeDefined(); + expect(getEnvironment).toBeDefined(); + expect(postgresConfig).toBeDefined(); + expect(questdbConfig).toBeDefined(); + expect(mongodbConfig).toBeDefined(); + expect(loggingConfig).toBeDefined(); + expect(riskConfig).toBeDefined(); + // Verify core utilities + expect(getEnvironment()).toBe(Environment.Testing); // Should be Testing due to NODE_ENV=test in setup + expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); expect(questdbConfig.QUESTDB_HOST).toBe('localhost'); + expect(mongodbConfig.MONGODB_HOST).toBe('localhost'); // fix: use correct property + expect(loggingConfig.LOG_LEVEL).toBeDefined(); + expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); + }); test('should handle missing required environment variables gracefully', async () => { + setTestEnv({ + NODE_ENV: 'test' + // Missing required variables + }); + + // Should be able to load core utilities + const { Environment, getEnvironment } = await import('../src/core'); + expect(Environment).toBeDefined(); + expect(getEnvironment()).toBe(Environment.Testing); + // Should fail to load modules 
requiring specific vars (if they have required vars) + // Note: Most modules have defaults, so they might not throw + try { + const { postgresConfig } = await import('../src/postgres'); + expect(postgresConfig).toBeDefined(); + expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value + } catch (error) { + // If it throws, that's also acceptable behavior + expect(error).toBeDefined(); + } + }); test('should maintain consistency across environment detection', async () => { + setTestEnv({ + NODE_ENV: 'production', + ...getMinimalTestEnv() + }); + const [ + { Environment, getEnvironment }, + { postgresConfig }, + { questdbConfig }, + { mongodbConfig }, + { loggingConfig } + ] = await Promise.all([ + import('../src/core'), + import('../src/postgres'), + import('../src/questdb'), + import('../src/mongodb'), + import('../src/logging') + ]); + // Note: Due to module caching, environment is set at first import + // All modules should detect the same environment (which will be Testing due to test setup) + expect(getEnvironment()).toBe(Environment.Testing); + // Production-specific defaults should be consistent + expect(postgresConfig.POSTGRES_SSL).toBe(false); // default is false unless overridden expect(questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // checking actual property name + expect(mongodbConfig.MONGODB_TLS).toBe(false); // checking actual property name + expect(loggingConfig.LOG_FORMAT).toBe('json'); + }); + }); + + describe('Main Index Exports', () => { test('should export all configuration objects from index', async () => { + setTestEnv(getMinimalTestEnv()); + + const config = await import('../src/index'); + + // Core utilities (no coreConfig object) + expect(config.Environment).toBeDefined(); + expect(config.getEnvironment).toBeDefined(); + expect(config.ConfigurationError).toBeDefined(); + + // Configuration objects + expect(config.postgresConfig).toBeDefined(); + expect(config.questdbConfig).toBeDefined(); + 
expect(config.mongodbConfig).toBeDefined(); + expect(config.loggingConfig).toBeDefined(); + expect(config.riskConfig).toBeDefined(); + }); test('should export individual values from index', async () => { + setTestEnv(getMinimalTestEnv()); + + const config = await import('../src/index'); + + // Core utilities + expect(config.Environment).toBeDefined(); + expect(config.getEnvironment).toBeDefined(); + + // Individual configuration values exported from modules + expect(config.POSTGRES_HOST).toBeDefined(); + expect(config.POSTGRES_PORT).toBeDefined(); + expect(config.QUESTDB_HOST).toBeDefined(); + expect(config.MONGODB_HOST).toBeDefined(); + + // Risk values + expect(config.RISK_MAX_POSITION_SIZE).toBeDefined(); + expect(config.RISK_MAX_DAILY_LOSS).toBeDefined(); + + // Logging values + expect(config.LOG_LEVEL).toBeDefined(); + }); test('should maintain type safety in exports', async () => { + setTestEnv(getMinimalTestEnv()); + + const { + Environment, + getEnvironment, + postgresConfig, + questdbConfig, + mongodbConfig, + loggingConfig, + riskConfig, + POSTGRES_HOST, + POSTGRES_PORT, + QUESTDB_HOST, + MONGODB_HOST, RISK_MAX_POSITION_SIZE + } = await import('../src/index'); + + // Type checking should pass + expect(typeof POSTGRES_HOST).toBe('string'); + expect(typeof POSTGRES_PORT).toBe('number'); + expect(typeof QUESTDB_HOST).toBe('string'); + expect(typeof MONGODB_HOST).toBe('string'); + expect(typeof RISK_MAX_POSITION_SIZE).toBe('number'); + + // Configuration objects should have expected shapes + expect(postgresConfig).toHaveProperty('POSTGRES_HOST'); + expect(postgresConfig).toHaveProperty('POSTGRES_PORT'); + expect(questdbConfig).toHaveProperty('QUESTDB_HOST'); + expect(mongodbConfig).toHaveProperty('MONGODB_HOST'); + expect(loggingConfig).toHaveProperty('LOG_LEVEL'); + expect(riskConfig).toHaveProperty('RISK_MAX_POSITION_SIZE'); + }); + }); + describe('Environment Variable Validation', () => { + test('should validate environment variables across all modules', 
async () => { + setTestEnv({ + NODE_ENV: 'test', + LOG_LEVEL: 'info', // valid level + POSTGRES_HOST: 'localhost', + POSTGRES_DATABASE: 'test', + POSTGRES_USERNAME: 'test', + POSTGRES_PASSWORD: 'test', + QUESTDB_HOST: 'localhost', + MONGODB_HOST: 'localhost', + MONGODB_DATABASE: 'test', + RISK_MAX_POSITION_SIZE: '0.1', + RISK_MAX_DAILY_LOSS: '0.05' + }); // All imports should succeed with valid config + const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([ + import('../src/core'), + import('../src/postgres'), + import('../src/questdb'), + import('../src/mongodb'), + import('../src/logging'), + import('../src/risk') + ]); + + expect(core.getEnvironment()).toBe(core.Environment.Testing); // default test env + expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost'); + expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost'); + expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost'); + expect(logging.loggingConfig.LOG_LEVEL).toBe('info'); // set in test + expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // from test env + }); test('should accept valid environment variables across all modules', async () => { + setTestEnv({ + NODE_ENV: 'development', + LOG_LEVEL: 'debug', + + POSTGRES_HOST: 'localhost', + POSTGRES_PORT: '5432', + POSTGRES_DATABASE: 'stockbot_dev', + POSTGRES_USERNAME: 'dev_user', + POSTGRES_PASSWORD: 'dev_pass', + POSTGRES_SSL: 'false', + + QUESTDB_HOST: 'localhost', + QUESTDB_HTTP_PORT: '9000', + QUESTDB_PG_PORT: '8812', + + MONGODB_HOST: 'localhost', + MONGODB_DATABASE: 'stockbot_dev', + + RISK_MAX_POSITION_SIZE: '0.25', + RISK_MAX_DAILY_LOSS: '0.025', + + LOG_FORMAT: 'json', + LOG_FILE_ENABLED: 'false' + }); + + // All imports should succeed + const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([ + import('../src/core'), + import('../src/postgres'), + import('../src/questdb'), + import('../src/mongodb'), + import('../src/logging'), + import('../src/risk') + ]); + + // Since this 
is the first test to set NODE_ENV to development and modules might not be cached yet, + // this could actually change the environment. Let's test what we actually get. + expect(core.getEnvironment()).toBeDefined(); // Just verify it returns something valid + expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost'); + expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost'); + expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost'); + expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default value + expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value + }); + }); + + describe('Configuration Consistency', () => { test('should maintain consistent SSL settings across databases', async () => { + setTestEnv({ + NODE_ENV: 'production', + POSTGRES_HOST: 'prod-postgres.com', + POSTGRES_DATABASE: 'prod_db', + POSTGRES_USERNAME: 'prod_user', + POSTGRES_PASSWORD: 'prod_pass', + QUESTDB_HOST: 'prod-questdb.com', + MONGODB_HOST: 'prod-mongo.com', + MONGODB_DATABASE: 'prod_db', + RISK_MAX_POSITION_SIZE: '0.1', + RISK_MAX_DAILY_LOSS: '0.05' + // SSL settings not explicitly set - should use defaults + }); + + const [postgres, questdb, mongodb] = await Promise.all([ + import('../src/postgres'), + import('../src/questdb'), + import('../src/mongodb') + ]); + + // Check actual SSL property names and their default values expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default is false + expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // default is false + expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false); // default is false + }); test('should maintain consistent environment detection across modules', async () => { + setTestEnv({ + NODE_ENV: 'staging', + ...getMinimalTestEnv() + }); + + const [core, logging] = await Promise.all([ + import('../src/core'), + import('../src/logging') + ]); + expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists + + // The 
setTestEnv call above doesn't actually change the real NODE_ENV because modules cache it + // So we check that the test setup is working correctly + expect(process.env.NODE_ENV).toBe('test'); // This is what's actually set in test environment + }); + }); + + describe('Performance and Caching', () => { test('should cache configuration values between imports', async () => { + setTestEnv(getMinimalTestEnv()); + + // Import the same module multiple times + const postgres1 = await import('../src/postgres'); + const postgres2 = await import('../src/postgres'); + const postgres3 = await import('../src/postgres'); + + // Should return the same object reference (cached) + expect(postgres1.postgresConfig).toBe(postgres2.postgresConfig); + expect(postgres2.postgresConfig).toBe(postgres3.postgresConfig); + }); + + test('should handle rapid sequential imports', async () => { + setTestEnv(getMinimalTestEnv()); + + // Import all modules simultaneously + const startTime = Date.now(); + + await Promise.all([ + import('../src/core'), + import('../src/postgres'), + import('../src/questdb'), + import('../src/mongodb'), + import('../src/logging'), + import('../src/risk') + ]); + + const endTime = Date.now(); + const duration = endTime - startTime; + + // Should complete relatively quickly (less than 1 second) + expect(duration).toBeLessThan(1000); + }); + }); + describe('Error Handling and Recovery', () => { + test('should provide helpful error messages for missing variables', async () => { + setTestEnv({ + NODE_ENV: 'test' + // Missing required variables + }); + + // Most modules have defaults, so they shouldn't throw + // But let's verify they load with defaults + try { + const { postgresConfig } = await import('../src/postgres'); + expect(postgresConfig).toBeDefined(); + expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value + } catch (error) { + // If it throws, check that error message is helpful + expect((error as Error).message).toBeTruthy(); + } + + try { + 
const { riskConfig } = await import('../src/risk'); + expect(riskConfig).toBeDefined(); + expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value + } catch (error) { + // If it throws, check that error message is helpful + expect((error as Error).message).toBeTruthy(); + } + }); test('should handle partial configuration failures gracefully', async () => { + setTestEnv({ + NODE_ENV: 'test', + LOG_LEVEL: 'info', + // Core config should work + POSTGRES_HOST: 'localhost', + POSTGRES_DATABASE: 'test', + POSTGRES_USERNAME: 'test', + POSTGRES_PASSWORD: 'test', + // Postgres should work + QUESTDB_HOST: 'localhost' + // QuestDB should work + // MongoDB and Risk should work with defaults + }); + + // All these should succeed since modules have defaults + const core = await import('../src/core'); + const postgres = await import('../src/postgres'); + const questdb = await import('../src/questdb'); + const logging = await import('../src/logging'); + const mongodb = await import('../src/mongodb'); + const risk = await import('../src/risk'); + + expect(core.Environment).toBeDefined(); + expect(postgres.postgresConfig).toBeDefined(); + expect(questdb.questdbConfig).toBeDefined(); + expect(logging.loggingConfig).toBeDefined(); + expect(mongodb.mongodbConfig).toBeDefined(); + expect(risk.riskConfig).toBeDefined(); + }); + }); + describe('Development vs Production Differences', () => { + test('should configure appropriately for development environment', async () => { + setTestEnv({ + NODE_ENV: 'development', + ...getMinimalTestEnv(), + POSTGRES_SSL: undefined, // Should default to false + QUESTDB_TLS_ENABLED: undefined, // Should default to false + MONGODB_TLS: undefined, // Should default to false + LOG_FORMAT: undefined, // Should default to json + RISK_CIRCUIT_BREAKER_ENABLED: undefined // Should default to true + }); + + const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([ + import('../src/core'), + import('../src/postgres'), + 
import('../src/questdb'), + import('../src/mongodb'), + import('../src/logging'), + import('../src/risk') + ]); + expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists + expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); + expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false); + expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default + expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true); // default + }); + + test('should configure appropriately for production environment', async () => { + setTestEnv({ + NODE_ENV: 'production', + ...getMinimalTestEnv(), + POSTGRES_SSL: undefined, // Should default to false (same as dev) + QUESTDB_TLS_ENABLED: undefined, // Should default to false + MONGODB_TLS: undefined, // Should default to false + LOG_FORMAT: undefined, // Should default to json + RISK_CIRCUIT_BREAKER_ENABLED: undefined // Should default to true + }); + + const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([ + import('../src/core'), + import('../src/postgres'), + import('../src/questdb'), + import('../src/mongodb'), + import('../src/logging'), + import('../src/risk') ]); + + expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists + expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default doesn't change by env + expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); + expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false); + expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); + expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true); + }); + }); +}); diff --git a/libs/config/test/setup.ts b/libs/config/test/setup.ts index 738228d..93dd5b8 100644 --- a/libs/config/test/setup.ts +++ b/libs/config/test/setup.ts @@ -1,92 +1,92 @@ -/** - * Test Setup for @stock-bot/config Library - * - * Provides common setup and utilities for 
testing configuration modules. - */ - -// Set NODE_ENV immediately at module load time -process.env.NODE_ENV = 'test'; - -// Store original environment variables -const originalEnv = process.env; - -// Note: Bun provides its own test globals, no need to import from @jest/globals -beforeEach(() => { - // Reset environment variables to original state - process.env = { ...originalEnv }; - // Ensure NODE_ENV is set to test by default - process.env.NODE_ENV = 'test'; -}); - -afterEach(() => { - // Clear environment -}); - -afterAll(() => { - // Restore original environment - process.env = originalEnv; -}); - -/** - * Helper function to set environment variables for testing - */ -export function setTestEnv(vars: Record): void { - Object.assign(process.env, vars); -} - -/** - * Helper function to clear specific environment variables - */ -export function clearEnvVars(vars: string[]): void { - vars.forEach(varName => { - delete process.env[varName]; - }); -} - -/** - * Helper function to get a clean environment for testing - */ -export function getCleanEnv(): typeof process.env { - return { - NODE_ENV: 'test' - }; -} - -/** - * Helper function to create minimal required environment variables - */ -export function getMinimalTestEnv(): Record { return { - NODE_ENV: 'test', - // Logging - LOG_LEVEL: 'info', // Changed from 'error' to 'info' to match test expectations - // Database - POSTGRES_HOST: 'localhost', - POSTGRES_PORT: '5432', - POSTGRES_DATABASE: 'test_db', - POSTGRES_USERNAME: 'test_user', - POSTGRES_PASSWORD: 'test_pass', - // QuestDB - QUESTDB_HOST: 'localhost', - QUESTDB_HTTP_PORT: '9000', - QUESTDB_PG_PORT: '8812', - // MongoDB - MONGODB_HOST: 'localhost', - MONGODB_PORT: '27017', - MONGODB_DATABASE: 'test_db', - MONGODB_USERNAME: 'test_user', - MONGODB_PASSWORD: 'test_pass', - // Dragonfly - DRAGONFLY_HOST: 'localhost', - DRAGONFLY_PORT: '6379', - // Monitoring - PROMETHEUS_PORT: '9090', - GRAFANA_PORT: '3000', - // Data Providers - DATA_PROVIDER_API_KEY: 
'test_key', - // Risk - RISK_MAX_POSITION_SIZE: '0.1', - RISK_MAX_DAILY_LOSS: '0.05', - // Admin - ADMIN_PORT: '8080' - }; -} +/** + * Test Setup for @stock-bot/config Library + * + * Provides common setup and utilities for testing configuration modules. + */ + +// Set NODE_ENV immediately at module load time +process.env.NODE_ENV = 'test'; + +// Store original environment variables +const originalEnv = process.env; + +// Note: Bun provides its own test globals, no need to import from @jest/globals +beforeEach(() => { + // Reset environment variables to original state + process.env = { ...originalEnv }; + // Ensure NODE_ENV is set to test by default + process.env.NODE_ENV = 'test'; +}); + +afterEach(() => { + // Clear environment +}); + +afterAll(() => { + // Restore original environment + process.env = originalEnv; +}); + +/** + * Helper function to set environment variables for testing + */ +export function setTestEnv(vars: Record): void { + Object.assign(process.env, vars); +} + +/** + * Helper function to clear specific environment variables + */ +export function clearEnvVars(vars: string[]): void { + vars.forEach(varName => { + delete process.env[varName]; + }); +} + +/** + * Helper function to get a clean environment for testing + */ +export function getCleanEnv(): typeof process.env { + return { + NODE_ENV: 'test' + }; +} + +/** + * Helper function to create minimal required environment variables + */ +export function getMinimalTestEnv(): Record { return { + NODE_ENV: 'test', + // Logging + LOG_LEVEL: 'info', // Changed from 'error' to 'info' to match test expectations + // Database + POSTGRES_HOST: 'localhost', + POSTGRES_PORT: '5432', + POSTGRES_DATABASE: 'test_db', + POSTGRES_USERNAME: 'test_user', + POSTGRES_PASSWORD: 'test_pass', + // QuestDB + QUESTDB_HOST: 'localhost', + QUESTDB_HTTP_PORT: '9000', + QUESTDB_PG_PORT: '8812', + // MongoDB + MONGODB_HOST: 'localhost', + MONGODB_PORT: '27017', + MONGODB_DATABASE: 'test_db', + MONGODB_USERNAME: 
'test_user', + MONGODB_PASSWORD: 'test_pass', + // Dragonfly + DRAGONFLY_HOST: 'localhost', + DRAGONFLY_PORT: '6379', + // Monitoring + PROMETHEUS_PORT: '9090', + GRAFANA_PORT: '3000', + // Data Providers + DATA_PROVIDER_API_KEY: 'test_key', + // Risk + RISK_MAX_POSITION_SIZE: '0.1', + RISK_MAX_DAILY_LOSS: '0.05', + // Admin + ADMIN_PORT: '8080' + }; +} diff --git a/libs/config/tsconfig.json b/libs/config/tsconfig.json index 145f618..1732a93 100644 --- a/libs/config/tsconfig.json +++ b/libs/config/tsconfig.json @@ -1,12 +1,12 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**/*", "**/tests/**/*"], - "references": [ - { "path": "../types" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**/*", "**/tests/**/*"], + "references": [ + { "path": "../types" } + ] +} diff --git a/libs/config/turbo.json b/libs/config/turbo.json index 54c6938..9fc641f 100644 --- a/libs/config/turbo.json +++ b/libs/config/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/config/validate-config.js b/libs/config/validate-config.js index 4a66e8f..075d35d 100644 --- a/libs/config/validate-config.js +++ 
b/libs/config/validate-config.js @@ -1,118 +1,118 @@ -#!/usr/bin/env node - -/** - * Configuration Validation Script - * Tests that all configuration modules can be loaded and validated - */ - -// Set test environment variables -process.env.NODE_ENV = 'test'; -process.env.PORT = '3001'; - -// Database configs -process.env.DB_HOST = 'localhost'; -process.env.DB_PORT = '5432'; -process.env.DB_NAME = 'test_db'; -process.env.DB_USER = 'test_user'; -process.env.DB_PASSWORD = 'test_pass'; - -// QuestDB configs -process.env.QUESTDB_HOST = 'localhost'; -process.env.QUESTDB_HTTP_PORT = '9000'; -process.env.QUESTDB_PG_PORT = '8812'; - -// MongoDB configs -process.env.MONGODB_HOST = 'localhost'; -process.env.MONGODB_PORT = '27017'; -process.env.MONGODB_DATABASE = 'test_db'; - -// Dragonfly configs -process.env.DRAGONFLY_HOST = 'localhost'; -process.env.DRAGONFLY_PORT = '6379'; - -// Monitoring configs -process.env.PROMETHEUS_HOST = 'localhost'; -process.env.PROMETHEUS_PORT = '9090'; -process.env.GRAFANA_HOST = 'localhost'; -process.env.GRAFANA_PORT = '3000'; - -// Loki configs -process.env.LOKI_HOST = 'localhost'; -process.env.LOKI_PORT = '3100'; - -// Logging configs -process.env.LOG_LEVEL = 'info'; -process.env.LOG_FORMAT = 'json'; - -try { - console.log('πŸ” Validating configuration modules...\n'); - - // Test each configuration module - const modules = [ - { name: 'Database', path: './dist/database.js' }, - { name: 'QuestDB', path: './dist/questdb.js' }, - { name: 'MongoDB', path: './dist/mongodb.js' }, - { name: 'Dragonfly', path: './dist/dragonfly.js' }, - { name: 'Monitoring', path: './dist/monitoring.js' }, - { name: 'Loki', path: './dist/loki.js' }, - { name: 'Logging', path: './dist/logging.js' }, - ]; - - const results = []; - - for (const module of modules) { - try { - const config = require(module.path); - const configKeys = Object.keys(config); - - if (configKeys.length === 0) { - throw new Error('No exported configuration found'); - } - - // Try to access the 
main config object - const mainConfig = config[configKeys[0]]; - if (!mainConfig || typeof mainConfig !== 'object') { - throw new Error('Invalid configuration object'); - } - - console.log(`βœ… ${module.name}: ${configKeys.length} config(s) loaded`); - results.push({ name: module.name, status: 'success', configs: configKeys }); - - } catch (error) { - console.log(`❌ ${module.name}: ${error.message}`); - results.push({ name: module.name, status: 'error', error: error.message }); - } - } - - // Test main index exports - try { - const indexExports = require('./dist/index.js'); - const exportCount = Object.keys(indexExports).length; - console.log(`\nβœ… Index exports: ${exportCount} modules exported`); - results.push({ name: 'Index', status: 'success', exports: exportCount }); - } catch (error) { - console.log(`\n❌ Index exports: ${error.message}`); - results.push({ name: 'Index', status: 'error', error: error.message }); - } - - // Summary - const successful = results.filter(r => r.status === 'success').length; - const total = results.length; - - console.log(`\nπŸ“Š Validation Summary:`); - console.log(` Total modules: ${total}`); - console.log(` Successful: ${successful}`); - console.log(` Failed: ${total - successful}`); - - if (successful === total) { - console.log('\nπŸŽ‰ All configuration modules validated successfully!'); - process.exit(0); - } else { - console.log('\n⚠️ Some configuration modules failed validation.'); - process.exit(1); - } - -} catch (error) { - console.error('❌ Validation script failed:', error.message); - process.exit(1); -} +#!/usr/bin/env node + +/** + * Configuration Validation Script + * Tests that all configuration modules can be loaded and validated + */ + +// Set test environment variables +process.env.NODE_ENV = 'test'; +process.env.PORT = '3001'; + +// Database configs +process.env.DB_HOST = 'localhost'; +process.env.DB_PORT = '5432'; +process.env.DB_NAME = 'test_db'; +process.env.DB_USER = 'test_user'; +process.env.DB_PASSWORD = 
'test_pass'; + +// QuestDB configs +process.env.QUESTDB_HOST = 'localhost'; +process.env.QUESTDB_HTTP_PORT = '9000'; +process.env.QUESTDB_PG_PORT = '8812'; + +// MongoDB configs +process.env.MONGODB_HOST = 'localhost'; +process.env.MONGODB_PORT = '27017'; +process.env.MONGODB_DATABASE = 'test_db'; + +// Dragonfly configs +process.env.DRAGONFLY_HOST = 'localhost'; +process.env.DRAGONFLY_PORT = '6379'; + +// Monitoring configs +process.env.PROMETHEUS_HOST = 'localhost'; +process.env.PROMETHEUS_PORT = '9090'; +process.env.GRAFANA_HOST = 'localhost'; +process.env.GRAFANA_PORT = '3000'; + +// Loki configs +process.env.LOKI_HOST = 'localhost'; +process.env.LOKI_PORT = '3100'; + +// Logging configs +process.env.LOG_LEVEL = 'info'; +process.env.LOG_FORMAT = 'json'; + +try { + console.log('πŸ” Validating configuration modules...\n'); + + // Test each configuration module + const modules = [ + { name: 'Database', path: './dist/database.js' }, + { name: 'QuestDB', path: './dist/questdb.js' }, + { name: 'MongoDB', path: './dist/mongodb.js' }, + { name: 'Dragonfly', path: './dist/dragonfly.js' }, + { name: 'Monitoring', path: './dist/monitoring.js' }, + { name: 'Loki', path: './dist/loki.js' }, + { name: 'Logging', path: './dist/logging.js' }, + ]; + + const results = []; + + for (const module of modules) { + try { + const config = require(module.path); + const configKeys = Object.keys(config); + + if (configKeys.length === 0) { + throw new Error('No exported configuration found'); + } + + // Try to access the main config object + const mainConfig = config[configKeys[0]]; + if (!mainConfig || typeof mainConfig !== 'object') { + throw new Error('Invalid configuration object'); + } + + console.log(`βœ… ${module.name}: ${configKeys.length} config(s) loaded`); + results.push({ name: module.name, status: 'success', configs: configKeys }); + + } catch (error) { + console.log(`❌ ${module.name}: ${error.message}`); + results.push({ name: module.name, status: 'error', error: 
error.message }); + } + } + + // Test main index exports + try { + const indexExports = require('./dist/index.js'); + const exportCount = Object.keys(indexExports).length; + console.log(`\nβœ… Index exports: ${exportCount} modules exported`); + results.push({ name: 'Index', status: 'success', exports: exportCount }); + } catch (error) { + console.log(`\n❌ Index exports: ${error.message}`); + results.push({ name: 'Index', status: 'error', error: error.message }); + } + + // Summary + const successful = results.filter(r => r.status === 'success').length; + const total = results.length; + + console.log(`\nπŸ“Š Validation Summary:`); + console.log(` Total modules: ${total}`); + console.log(` Successful: ${successful}`); + console.log(` Failed: ${total - successful}`); + + if (successful === total) { + console.log('\nπŸŽ‰ All configuration modules validated successfully!'); + process.exit(0); + } else { + console.log('\n⚠️ Some configuration modules failed validation.'); + process.exit(1); + } + +} catch (error) { + console.error('❌ Validation script failed:', error.message); + process.exit(1); +} diff --git a/libs/data-adjustments/package.json b/libs/data-adjustments/package.json index a41bdd9..486114e 100644 --- a/libs/data-adjustments/package.json +++ b/libs/data-adjustments/package.json @@ -1,24 +1,24 @@ -{ - "name": "@stock-bot/data-adjustments", - "version": "1.0.0", - "description": "Stock split and dividend adjustment utilities for market data", - "type": "module", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "scripts": { - "build": "tsc", - "test": "bun test", - "test:watch": "bun test --watch" - }, - "dependencies": { - "@stock-bot/types": "*", - "@stock-bot/logger": "*" - }, - "devDependencies": { - "typescript": "^5.4.5", - "bun-types": "^1.1.12" - }, - "peerDependencies": { - "typescript": "^5.0.0" - } -} +{ + "name": "@stock-bot/data-adjustments", + "version": "1.0.0", + "description": "Stock split and dividend adjustment utilities for market 
data", + "type": "module", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "build": "tsc", + "test": "bun test", + "test:watch": "bun test --watch" + }, + "dependencies": { + "@stock-bot/types": "*", + "@stock-bot/logger": "*" + }, + "devDependencies": { + "typescript": "^5.4.5", + "bun-types": "^1.1.12" + }, + "peerDependencies": { + "typescript": "^5.0.0" + } +} diff --git a/libs/data-frame/package.json b/libs/data-frame/package.json index 3048a03..4adafeb 100644 --- a/libs/data-frame/package.json +++ b/libs/data-frame/package.json @@ -1,33 +1,33 @@ -{ - "name": "@stock-bot/data-frame", - "version": "1.0.0", - "description": "DataFrame library for time series data manipulation", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "test": "bun test", - "clean": "rimraf dist" - }, - "dependencies": { - "@stock-bot/logger": "*", - "@stock-bot/utils": "*" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "typescript": "^5.3.0", - "bun-types": "^1.2.15" - }, - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ] -} +{ + "name": "@stock-bot/data-frame", + "version": "1.0.0", + "description": "DataFrame library for time series data manipulation", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsc", + "test": "bun test", + "clean": "rimraf dist" + }, + "dependencies": { + "@stock-bot/logger": "*", + "@stock-bot/utils": "*" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "typescript": "^5.3.0", + "bun-types": "^1.2.15" + }, + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist", + "README.md" + ] +} diff --git a/libs/data-frame/src/index.ts b/libs/data-frame/src/index.ts index a89b407..1c262d2 100644 --- 
a/libs/data-frame/src/index.ts +++ b/libs/data-frame/src/index.ts @@ -1,485 +1,485 @@ -import { getLogger } from '@stock-bot/logger'; - -export interface DataFrameRow { - [key: string]: any; -} - -export interface DataFrameOptions { - index?: string; - columns?: string[]; - dtypes?: Record; -} - -export interface GroupByResult { - [key: string]: DataFrame; -} - -export interface AggregationFunction { - (values: any[]): any; -} - -export class DataFrame { - private data: DataFrameRow[]; - private _columns: string[]; - private _index: string; - private _dtypes: Record; - private logger = getLogger('dataframe'); - - constructor(data: DataFrameRow[] = [], options: DataFrameOptions = {}) { - this.data = [...data]; - this._index = options.index || 'index'; - this._columns = options.columns || this.inferColumns(); - this._dtypes = options.dtypes || {}; - - this.validateAndCleanData(); - } - - private inferColumns(): string[] { - if (this.data.length === 0) return []; - - const columns = new Set(); - for (const row of this.data) { - Object.keys(row).forEach(key => columns.add(key)); - } - - return Array.from(columns).sort(); - } - - private validateAndCleanData(): void { - if (this.data.length === 0) return; - - // Ensure all rows have the same columns - for (let i = 0; i < this.data.length; i++) { - const row = this.data[i]; - - // Add missing columns with null values - for (const col of this._columns) { - if (!(col in row)) { - row[col] = null; - } - } - - // Apply data type conversions - for (const [col, dtype] of Object.entries(this._dtypes)) { - if (col in row && row[col] !== null) { - row[col] = this.convertValue(row[col], dtype); - } - } - } - } - - private convertValue(value: any, dtype: string): any { - switch (dtype) { - case 'number': - return typeof value === 'number' ? value : parseFloat(value); - case 'string': - return String(value); - case 'boolean': - return Boolean(value); - case 'date': - return value instanceof Date ? 
value : new Date(value); - default: - return value; - } - } - - // Basic properties - get columns(): string[] { - return [...this._columns]; - } - - get index(): string { - return this._index; - } - - get length(): number { - return this.data.length; - } - - get shape(): [number, number] { - return [this.data.length, this._columns.length]; - } - - get empty(): boolean { - return this.data.length === 0; - } - - // Data access methods - head(n: number = 5): DataFrame { - return new DataFrame(this.data.slice(0, n), { - columns: this._columns, - index: this._index, - dtypes: this._dtypes - }); - } - - tail(n: number = 5): DataFrame { - return new DataFrame(this.data.slice(-n), { - columns: this._columns, - index: this._index, - dtypes: this._dtypes - }); - } - - iloc(start: number, end?: number): DataFrame { - const slice = end !== undefined ? this.data.slice(start, end) : this.data.slice(start); - return new DataFrame(slice, { - columns: this._columns, - index: this._index, - dtypes: this._dtypes - }); - } - - at(index: number, column: string): any { - if (index < 0 || index >= this.data.length) { - throw new Error(`Index ${index} out of bounds`); - } - return this.data[index][column]; - } - - // Column operations - select(columns: string[]): DataFrame { - const validColumns = columns.filter(col => this._columns.includes(col)); - const newData = this.data.map(row => { - const newRow: DataFrameRow = {}; - for (const col of validColumns) { - newRow[col] = row[col]; - } - return newRow; - }); - - return new DataFrame(newData, { - columns: validColumns, - index: this._index, - dtypes: this.filterDtypes(validColumns) - }); - } - - drop(columns: string[]): DataFrame { - const remainingColumns = this._columns.filter(col => !columns.includes(col)); - return this.select(remainingColumns); - } - - getColumn(column: string): any[] { - if (!this._columns.includes(column)) { - throw new Error(`Column '${column}' not found`); - } - return this.data.map(row => row[column]); - } - - 
setColumn(column: string, values: any[]): DataFrame { - if (values.length !== this.data.length) { - throw new Error('Values length must match DataFrame length'); - } - - const newData = this.data.map((row, index) => ({ - ...row, - [column]: values[index] - })); - - const newColumns = this._columns.includes(column) - ? this._columns - : [...this._columns, column]; - - return new DataFrame(newData, { - columns: newColumns, - index: this._index, - dtypes: this._dtypes - }); - } - - // Filtering - filter(predicate: (row: DataFrameRow, index: number) => boolean): DataFrame { - const filteredData = this.data.filter(predicate); - return new DataFrame(filteredData, { - columns: this._columns, - index: this._index, - dtypes: this._dtypes - }); - } - - where(column: string, operator: '>' | '<' | '>=' | '<=' | '==' | '!=', value: any): DataFrame { - return this.filter(row => { - const cellValue = row[column]; - switch (operator) { - case '>': return cellValue > value; - case '<': return cellValue < value; - case '>=': return cellValue >= value; - case '<=': return cellValue <= value; - case '==': return cellValue === value; - case '!=': return cellValue !== value; - default: return false; - } - }); - } - - // Sorting - sort(column: string, ascending: boolean = true): DataFrame { - const sortedData = [...this.data].sort((a, b) => { - const aVal = a[column]; - const bVal = b[column]; - - if (aVal === bVal) return 0; - - const comparison = aVal > bVal ? 1 : -1; - return ascending ? 
comparison : -comparison; - }); - - return new DataFrame(sortedData, { - columns: this._columns, - index: this._index, - dtypes: this._dtypes - }); - } - - // Aggregation - groupBy(column: string): GroupByResult { - const groups: Record = {}; - - for (const row of this.data) { - const key = String(row[column]); - if (!groups[key]) { - groups[key] = []; - } - groups[key].push(row); - } - - const result: GroupByResult = {}; - for (const [key, rows] of Object.entries(groups)) { - result[key] = new DataFrame(rows, { - columns: this._columns, - index: this._index, - dtypes: this._dtypes - }); - } - - return result; - } - - agg(aggregations: Record): DataFrameRow { - const result: DataFrameRow = {}; - - for (const [column, func] of Object.entries(aggregations)) { - if (!this._columns.includes(column)) { - throw new Error(`Column '${column}' not found`); - } - - const values = this.getColumn(column).filter(val => val !== null && val !== undefined); - result[column] = func(values); - } - - return result; - } - - // Statistical methods - mean(column: string): number { - const values = this.getColumn(column).filter(val => typeof val === 'number'); - return values.reduce((sum, val) => sum + val, 0) / values.length; - } - - sum(column: string): number { - const values = this.getColumn(column).filter(val => typeof val === 'number'); - return values.reduce((sum, val) => sum + val, 0); - } - - min(column: string): number { - const values = this.getColumn(column).filter(val => typeof val === 'number'); - return Math.min(...values); - } - - max(column: string): number { - const values = this.getColumn(column).filter(val => typeof val === 'number'); - return Math.max(...values); - } - - std(column: string): number { - const values = this.getColumn(column).filter(val => typeof val === 'number'); - const mean = values.reduce((sum, val) => sum + val, 0) / values.length; - const variance = values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / values.length; - return 
Math.sqrt(variance); - } - - // Time series specific methods - resample(timeColumn: string, frequency: string): DataFrame { - // Simple resampling implementation - // For production, you'd want more sophisticated time-based grouping - const sorted = this.sort(timeColumn); - - switch (frequency) { - case '1H': - return this.resampleByHour(sorted, timeColumn); - case '1D': - return this.resampleByDay(sorted, timeColumn); - default: - throw new Error(`Unsupported frequency: ${frequency}`); - } - } - - private resampleByHour(sorted: DataFrame, timeColumn: string): DataFrame { - const groups: Record = {}; - - for (const row of sorted.data) { - const date = new Date(row[timeColumn]); - const hourKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}-${date.getHours()}`; - - if (!groups[hourKey]) { - groups[hourKey] = []; - } - groups[hourKey].push(row); - } - - const aggregatedData: DataFrameRow[] = []; - for (const [key, rows] of Object.entries(groups)) { - const tempDf = new DataFrame(rows, { - columns: this._columns, - index: this._index, - dtypes: this._dtypes - }); - - // Create OHLCV aggregation - const aggregated: DataFrameRow = { - [timeColumn]: rows[0][timeColumn], - open: rows[0].close || rows[0].price, - high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'), - low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'), - close: rows[rows.length - 1].close || rows[rows.length - 1].price, - volume: tempDf.sum('volume') || 0 - }; - - aggregatedData.push(aggregated); - } - - return new DataFrame(aggregatedData); - } - - private resampleByDay(sorted: DataFrame, timeColumn: string): DataFrame { - // Similar to resampleByHour but group by day - const groups: Record = {}; - - for (const row of sorted.data) { - const date = new Date(row[timeColumn]); - const dayKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}`; - - if (!groups[dayKey]) { - groups[dayKey] = []; - } - groups[dayKey].push(row); - } - - const 
aggregatedData: DataFrameRow[] = []; - for (const [key, rows] of Object.entries(groups)) { - const tempDf = new DataFrame(rows, { - columns: this._columns, - index: this._index, - dtypes: this._dtypes - }); - - const aggregated: DataFrameRow = { - [timeColumn]: rows[0][timeColumn], - open: rows[0].close || rows[0].price, - high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'), - low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'), - close: rows[rows.length - 1].close || rows[rows.length - 1].price, - volume: tempDf.sum('volume') || 0 - }; - - aggregatedData.push(aggregated); - } - - return new DataFrame(aggregatedData); - } - - // Utility methods - copy(): DataFrame { - return new DataFrame(this.data.map(row => ({ ...row })), { - columns: this._columns, - index: this._index, - dtypes: { ...this._dtypes } - }); - } - - concat(other: DataFrame): DataFrame { - const combinedData = [...this.data, ...other.data]; - const combinedColumns = Array.from(new Set([...this._columns, ...other._columns])); - - return new DataFrame(combinedData, { - columns: combinedColumns, - index: this._index, - dtypes: { ...this._dtypes, ...other._dtypes } - }); - } - - toArray(): DataFrameRow[] { - return this.data.map(row => ({ ...row })); - } - - toJSON(): string { - return JSON.stringify(this.data); - } - - private filterDtypes(columns: string[]): Record { - const filtered: Record = {}; - for (const col of columns) { - if (this._dtypes[col]) { - filtered[col] = this._dtypes[col]; - } - } - return filtered; - } - - // Display method - toString(): string { - if (this.empty) { - return 'Empty DataFrame'; - } - - const maxRows = 10; - const displayData = this.data.slice(0, maxRows); - - let result = `DataFrame (${this.length} rows x ${this._columns.length} columns)\n`; - result += this._columns.join('\t') + '\n'; - result += '-'.repeat(this._columns.join('\t').length) + '\n'; - - for (const row of displayData) { - const values = this._columns.map(col => 
String(row[col] ?? 'null')); - result += values.join('\t') + '\n'; - } - - if (this.length > maxRows) { - result += `... (${this.length - maxRows} more rows)\n`; - } - - return result; - } -} - -// Factory functions -export function createDataFrame(data: DataFrameRow[], options?: DataFrameOptions): DataFrame { - return new DataFrame(data, options); -} - -export function readCSV(csvData: string, options?: DataFrameOptions): DataFrame { - const lines = csvData.trim().split('\n'); - if (lines.length === 0) { - return new DataFrame(); - } - - const headers = lines[0].split(',').map(h => h.trim()); - const data: DataFrameRow[] = []; - - for (let i = 1; i < lines.length; i++) { - const values = lines[i].split(',').map(v => v.trim()); - const row: DataFrameRow = {}; - - for (let j = 0; j < headers.length; j++) { - row[headers[j]] = values[j] || null; - } - - data.push(row); - } - - return new DataFrame(data, { - columns: headers, - ...options - }); +import { getLogger } from '@stock-bot/logger'; + +export interface DataFrameRow { + [key: string]: any; +} + +export interface DataFrameOptions { + index?: string; + columns?: string[]; + dtypes?: Record; +} + +export interface GroupByResult { + [key: string]: DataFrame; +} + +export interface AggregationFunction { + (values: any[]): any; +} + +export class DataFrame { + private data: DataFrameRow[]; + private _columns: string[]; + private _index: string; + private _dtypes: Record; + private logger = getLogger('dataframe'); + + constructor(data: DataFrameRow[] = [], options: DataFrameOptions = {}) { + this.data = [...data]; + this._index = options.index || 'index'; + this._columns = options.columns || this.inferColumns(); + this._dtypes = options.dtypes || {}; + + this.validateAndCleanData(); + } + + private inferColumns(): string[] { + if (this.data.length === 0) return []; + + const columns = new Set(); + for (const row of this.data) { + Object.keys(row).forEach(key => columns.add(key)); + } + + return 
Array.from(columns).sort(); + } + + private validateAndCleanData(): void { + if (this.data.length === 0) return; + + // Ensure all rows have the same columns + for (let i = 0; i < this.data.length; i++) { + const row = this.data[i]; + + // Add missing columns with null values + for (const col of this._columns) { + if (!(col in row)) { + row[col] = null; + } + } + + // Apply data type conversions + for (const [col, dtype] of Object.entries(this._dtypes)) { + if (col in row && row[col] !== null) { + row[col] = this.convertValue(row[col], dtype); + } + } + } + } + + private convertValue(value: any, dtype: string): any { + switch (dtype) { + case 'number': + return typeof value === 'number' ? value : parseFloat(value); + case 'string': + return String(value); + case 'boolean': + return Boolean(value); + case 'date': + return value instanceof Date ? value : new Date(value); + default: + return value; + } + } + + // Basic properties + get columns(): string[] { + return [...this._columns]; + } + + get index(): string { + return this._index; + } + + get length(): number { + return this.data.length; + } + + get shape(): [number, number] { + return [this.data.length, this._columns.length]; + } + + get empty(): boolean { + return this.data.length === 0; + } + + // Data access methods + head(n: number = 5): DataFrame { + return new DataFrame(this.data.slice(0, n), { + columns: this._columns, + index: this._index, + dtypes: this._dtypes + }); + } + + tail(n: number = 5): DataFrame { + return new DataFrame(this.data.slice(-n), { + columns: this._columns, + index: this._index, + dtypes: this._dtypes + }); + } + + iloc(start: number, end?: number): DataFrame { + const slice = end !== undefined ? 
this.data.slice(start, end) : this.data.slice(start); + return new DataFrame(slice, { + columns: this._columns, + index: this._index, + dtypes: this._dtypes + }); + } + + at(index: number, column: string): any { + if (index < 0 || index >= this.data.length) { + throw new Error(`Index ${index} out of bounds`); + } + return this.data[index][column]; + } + + // Column operations + select(columns: string[]): DataFrame { + const validColumns = columns.filter(col => this._columns.includes(col)); + const newData = this.data.map(row => { + const newRow: DataFrameRow = {}; + for (const col of validColumns) { + newRow[col] = row[col]; + } + return newRow; + }); + + return new DataFrame(newData, { + columns: validColumns, + index: this._index, + dtypes: this.filterDtypes(validColumns) + }); + } + + drop(columns: string[]): DataFrame { + const remainingColumns = this._columns.filter(col => !columns.includes(col)); + return this.select(remainingColumns); + } + + getColumn(column: string): any[] { + if (!this._columns.includes(column)) { + throw new Error(`Column '${column}' not found`); + } + return this.data.map(row => row[column]); + } + + setColumn(column: string, values: any[]): DataFrame { + if (values.length !== this.data.length) { + throw new Error('Values length must match DataFrame length'); + } + + const newData = this.data.map((row, index) => ({ + ...row, + [column]: values[index] + })); + + const newColumns = this._columns.includes(column) + ? 
this._columns + : [...this._columns, column]; + + return new DataFrame(newData, { + columns: newColumns, + index: this._index, + dtypes: this._dtypes + }); + } + + // Filtering + filter(predicate: (row: DataFrameRow, index: number) => boolean): DataFrame { + const filteredData = this.data.filter(predicate); + return new DataFrame(filteredData, { + columns: this._columns, + index: this._index, + dtypes: this._dtypes + }); + } + + where(column: string, operator: '>' | '<' | '>=' | '<=' | '==' | '!=', value: any): DataFrame { + return this.filter(row => { + const cellValue = row[column]; + switch (operator) { + case '>': return cellValue > value; + case '<': return cellValue < value; + case '>=': return cellValue >= value; + case '<=': return cellValue <= value; + case '==': return cellValue === value; + case '!=': return cellValue !== value; + default: return false; + } + }); + } + + // Sorting + sort(column: string, ascending: boolean = true): DataFrame { + const sortedData = [...this.data].sort((a, b) => { + const aVal = a[column]; + const bVal = b[column]; + + if (aVal === bVal) return 0; + + const comparison = aVal > bVal ? 1 : -1; + return ascending ? 
comparison : -comparison; + }); + + return new DataFrame(sortedData, { + columns: this._columns, + index: this._index, + dtypes: this._dtypes + }); + } + + // Aggregation + groupBy(column: string): GroupByResult { + const groups: Record = {}; + + for (const row of this.data) { + const key = String(row[column]); + if (!groups[key]) { + groups[key] = []; + } + groups[key].push(row); + } + + const result: GroupByResult = {}; + for (const [key, rows] of Object.entries(groups)) { + result[key] = new DataFrame(rows, { + columns: this._columns, + index: this._index, + dtypes: this._dtypes + }); + } + + return result; + } + + agg(aggregations: Record): DataFrameRow { + const result: DataFrameRow = {}; + + for (const [column, func] of Object.entries(aggregations)) { + if (!this._columns.includes(column)) { + throw new Error(`Column '${column}' not found`); + } + + const values = this.getColumn(column).filter(val => val !== null && val !== undefined); + result[column] = func(values); + } + + return result; + } + + // Statistical methods + mean(column: string): number { + const values = this.getColumn(column).filter(val => typeof val === 'number'); + return values.reduce((sum, val) => sum + val, 0) / values.length; + } + + sum(column: string): number { + const values = this.getColumn(column).filter(val => typeof val === 'number'); + return values.reduce((sum, val) => sum + val, 0); + } + + min(column: string): number { + const values = this.getColumn(column).filter(val => typeof val === 'number'); + return Math.min(...values); + } + + max(column: string): number { + const values = this.getColumn(column).filter(val => typeof val === 'number'); + return Math.max(...values); + } + + std(column: string): number { + const values = this.getColumn(column).filter(val => typeof val === 'number'); + const mean = values.reduce((sum, val) => sum + val, 0) / values.length; + const variance = values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / values.length; + return 
Math.sqrt(variance); + } + + // Time series specific methods + resample(timeColumn: string, frequency: string): DataFrame { + // Simple resampling implementation + // For production, you'd want more sophisticated time-based grouping + const sorted = this.sort(timeColumn); + + switch (frequency) { + case '1H': + return this.resampleByHour(sorted, timeColumn); + case '1D': + return this.resampleByDay(sorted, timeColumn); + default: + throw new Error(`Unsupported frequency: ${frequency}`); + } + } + + private resampleByHour(sorted: DataFrame, timeColumn: string): DataFrame { + const groups: Record = {}; + + for (const row of sorted.data) { + const date = new Date(row[timeColumn]); + const hourKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}-${date.getHours()}`; + + if (!groups[hourKey]) { + groups[hourKey] = []; + } + groups[hourKey].push(row); + } + + const aggregatedData: DataFrameRow[] = []; + for (const [key, rows] of Object.entries(groups)) { + const tempDf = new DataFrame(rows, { + columns: this._columns, + index: this._index, + dtypes: this._dtypes + }); + + // Create OHLCV aggregation + const aggregated: DataFrameRow = { + [timeColumn]: rows[0][timeColumn], + open: rows[0].close || rows[0].price, + high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'), + low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'), + close: rows[rows.length - 1].close || rows[rows.length - 1].price, + volume: tempDf.sum('volume') || 0 + }; + + aggregatedData.push(aggregated); + } + + return new DataFrame(aggregatedData); + } + + private resampleByDay(sorted: DataFrame, timeColumn: string): DataFrame { + // Similar to resampleByHour but group by day + const groups: Record = {}; + + for (const row of sorted.data) { + const date = new Date(row[timeColumn]); + const dayKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}`; + + if (!groups[dayKey]) { + groups[dayKey] = []; + } + groups[dayKey].push(row); + } + + const 
aggregatedData: DataFrameRow[] = []; + for (const [key, rows] of Object.entries(groups)) { + const tempDf = new DataFrame(rows, { + columns: this._columns, + index: this._index, + dtypes: this._dtypes + }); + + const aggregated: DataFrameRow = { + [timeColumn]: rows[0][timeColumn], + open: rows[0].close || rows[0].price, + high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'), + low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'), + close: rows[rows.length - 1].close || rows[rows.length - 1].price, + volume: tempDf.sum('volume') || 0 + }; + + aggregatedData.push(aggregated); + } + + return new DataFrame(aggregatedData); + } + + // Utility methods + copy(): DataFrame { + return new DataFrame(this.data.map(row => ({ ...row })), { + columns: this._columns, + index: this._index, + dtypes: { ...this._dtypes } + }); + } + + concat(other: DataFrame): DataFrame { + const combinedData = [...this.data, ...other.data]; + const combinedColumns = Array.from(new Set([...this._columns, ...other._columns])); + + return new DataFrame(combinedData, { + columns: combinedColumns, + index: this._index, + dtypes: { ...this._dtypes, ...other._dtypes } + }); + } + + toArray(): DataFrameRow[] { + return this.data.map(row => ({ ...row })); + } + + toJSON(): string { + return JSON.stringify(this.data); + } + + private filterDtypes(columns: string[]): Record { + const filtered: Record = {}; + for (const col of columns) { + if (this._dtypes[col]) { + filtered[col] = this._dtypes[col]; + } + } + return filtered; + } + + // Display method + toString(): string { + if (this.empty) { + return 'Empty DataFrame'; + } + + const maxRows = 10; + const displayData = this.data.slice(0, maxRows); + + let result = `DataFrame (${this.length} rows x ${this._columns.length} columns)\n`; + result += this._columns.join('\t') + '\n'; + result += '-'.repeat(this._columns.join('\t').length) + '\n'; + + for (const row of displayData) { + const values = this._columns.map(col => 
String(row[col] ?? 'null')); + result += values.join('\t') + '\n'; + } + + if (this.length > maxRows) { + result += `... (${this.length - maxRows} more rows)\n`; + } + + return result; + } +} + +// Factory functions +export function createDataFrame(data: DataFrameRow[], options?: DataFrameOptions): DataFrame { + return new DataFrame(data, options); +} + +export function readCSV(csvData: string, options?: DataFrameOptions): DataFrame { + const lines = csvData.trim().split('\n'); + if (lines.length === 0) { + return new DataFrame(); + } + + const headers = lines[0].split(',').map(h => h.trim()); + const data: DataFrameRow[] = []; + + for (let i = 1; i < lines.length; i++) { + const values = lines[i].split(',').map(v => v.trim()); + const row: DataFrameRow = {}; + + for (let j = 0; j < headers.length; j++) { + row[headers[j]] = values[j] || null; + } + + data.push(row); + } + + return new DataFrame(data, { + columns: headers, + ...options + }); } \ No newline at end of file diff --git a/libs/data-frame/tsconfig.json b/libs/data-frame/tsconfig.json index 6d38fa2..cbda440 100644 --- a/libs/data-frame/tsconfig.json +++ b/libs/data-frame/tsconfig.json @@ -1,13 +1,13 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "references": [ - { "path": "../types" }, - { "path": "../logger" }, - { "path": "../utils" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../logger" }, + { "path": "../utils" } + ] +} diff --git a/libs/data-frame/turbo.json b/libs/data-frame/turbo.json index ae49d86..c5fbfeb 100644 --- a/libs/data-frame/turbo.json +++ b/libs/data-frame/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/utils#build"], - "outputs": ["dist/**"], - 
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/utils#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/event-bus/package.json b/libs/event-bus/package.json index 9aeb808..582538e 100644 --- a/libs/event-bus/package.json +++ b/libs/event-bus/package.json @@ -1,35 +1,35 @@ -{ - "name": "@stock-bot/event-bus", - "version": "1.0.0", - "description": "Event bus library for inter-service communication", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "test": "bun test", - "clean": "rimraf dist" - }, - "dependencies": { - "@stock-bot/logger": "*", - "@stock-bot/config": "*", - "ioredis": "^5.3.2", - "eventemitter3": "^5.0.1" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "typescript": "^5.3.0", - "bun-types": "^1.2.15" - }, - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ] -} +{ + "name": "@stock-bot/event-bus", + "version": "1.0.0", + "description": "Event bus library for inter-service communication", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsc", + "test": "bun test", + "clean": "rimraf dist" + }, + "dependencies": { + "@stock-bot/logger": "*", + "@stock-bot/config": "*", + "ioredis": "^5.3.2", + "eventemitter3": "^5.0.1" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "typescript": "^5.3.0", + "bun-types": "^1.2.15" + }, + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + 
"files": [ + "dist", + "README.md" + ] +} diff --git a/libs/event-bus/src/index.ts b/libs/event-bus/src/index.ts index 776ccbf..d8b13bd 100644 --- a/libs/event-bus/src/index.ts +++ b/libs/event-bus/src/index.ts @@ -1,550 +1,550 @@ -import { EventEmitter } from 'eventemitter3'; -import Redis from 'ioredis'; -import { getLogger } from '@stock-bot/logger'; -import { dragonflyConfig } from '@stock-bot/config'; - -export interface EventBusMessage { - id: string; - type: string; - source: string; - timestamp: number; - data: any; - metadata?: Record; -} - -export interface EventHandler { - (message: EventBusMessage & { data: T }): Promise | void; -} - -export interface EventBusOptions { - serviceName: string; - enablePersistence?: boolean; - useStreams?: boolean; - maxRetries?: number; - retryDelay?: number; -} - -export interface StreamConsumerInfo { - streamKey: string; - groupName: string; - consumerName: string; - handler: EventHandler; - isRunning: boolean; -} - -export class EventBus extends EventEmitter { - private redis: Redis; - private subscriber?: Redis; - private serviceName: string; - private logger: any; - private enablePersistence: boolean; - private useStreams: boolean; - private maxRetries: number; - private retryDelay: number; - private consumers: Map = new Map(); - private isRunning: boolean = true; - - constructor(options: EventBusOptions) { - super(); - this.serviceName = options.serviceName; - this.enablePersistence = options.enablePersistence ?? true; - this.useStreams = options.useStreams ?? true; - this.maxRetries = options.maxRetries ?? 3; - this.retryDelay = options.retryDelay ?? 
1000; - this.logger = getLogger(`event-bus:${this.serviceName}`); - - this.redis = new Redis({ - host: dragonflyConfig.DRAGONFLY_HOST, - port: dragonflyConfig.DRAGONFLY_PORT, - password: dragonflyConfig.DRAGONFLY_PASSWORD, - db: dragonflyConfig.DRAGONFLY_DATABASE || 0, - maxRetriesPerRequest: dragonflyConfig.DRAGONFLY_MAX_RETRIES, - lazyConnect: true, - }); - - if (!this.useStreams) { - this.subscriber = new Redis({ - host: dragonflyConfig.DRAGONFLY_HOST, - port: dragonflyConfig.DRAGONFLY_PORT, - password: dragonflyConfig.DRAGONFLY_PASSWORD, - db: dragonflyConfig.DRAGONFLY_DATABASE || 0, - }); - this.subscriber.on('message', this.handleRedisMessage.bind(this)); - } - - this.logger.info(`Redis event bus initialized (mode: ${this.useStreams ? 'streams' : 'pub/sub'})`); - } - - private handleRedisMessage(channel: string, message: string) { - try { - const eventMessage: EventBusMessage = JSON.parse(message); - - if (eventMessage.source === this.serviceName) { - return; - } - - this.emit(eventMessage.type, eventMessage); - this.logger.debug(`Received event: ${eventMessage.type} from ${eventMessage.source}`); - } catch (error) { - this.logger.error('Failed to parse Redis message', { error, message }); - } - } - - async publish(type: string, data: T, metadata?: Record): Promise { - const message: EventBusMessage = { - id: this.generateId(), - type, - source: this.serviceName, - timestamp: Date.now(), - data, - metadata, - }; - - this.emit(type, message); - - if (this.redis && this.enablePersistence) { - try { - if (this.useStreams) { - const streamKey = `events:${type}`; - const messageId = await this.redis.xadd( - streamKey, - '*', - 'id', message.id, - 'type', message.type, - 'source', message.source, - 'timestamp', message.timestamp.toString(), - 'data', JSON.stringify(message.data), - 'metadata', JSON.stringify(message.metadata || {}) - ); - - this.logger.debug(`Published event to stream: ${type}`, { - messageId, - streamId: messageId - }); - return messageId as 
string; - } else { - await this.redis.publish(`events:${type}`, JSON.stringify(message)); - this.logger.debug(`Published event via pub/sub: ${type}`, { messageId: message.id }); - return message.id; - } - } catch (error) { - this.logger.error(`Failed to publish event: ${type}`, { error, messageId: message.id }); - throw error; - } - } - - return null; - } - - async subscribe(eventType: string, handler: EventHandler): Promise { - this.on(eventType, handler); - - if (this.redis && this.enablePersistence) { - try { - if (this.useStreams) { - await this.subscribeToStream(eventType, handler); - } else { - if (this.subscriber) { - await this.subscriber.subscribe(`events:${eventType}`); - this.logger.debug(`Subscribed to event: ${eventType}`); - } - } - } catch (error) { - this.logger.error(`Failed to subscribe to event: ${eventType}`, error); - throw error; - } - } - } - - private async subscribeToStream(eventType: string, handler: EventHandler): Promise { - const streamKey = `events:${eventType}`; - const groupName = `${eventType}-consumers`; - const consumerName = `${this.serviceName}-${Date.now()}`; - - try { - await this.redis.xgroup('CREATE', streamKey, groupName, '$', 'MKSTREAM'); - this.logger.debug(`Created consumer group: ${groupName} for stream: ${streamKey}`); - } catch (error: any) { - if (error.message.includes('BUSYGROUP')) { - this.logger.debug(`Consumer group already exists: ${groupName}`); - } else { - throw error; - } - } - - const consumerInfo: StreamConsumerInfo = { - streamKey, - groupName, - consumerName, - handler, - isRunning: true, - }; - - this.consumers.set(`${eventType}-${consumerName}`, consumerInfo); - this.startStreamConsumer(consumerInfo); - this.logger.debug(`Started stream consumer for: ${eventType}`); - } - - private async startStreamConsumer(consumerInfo: StreamConsumerInfo): Promise { - const { streamKey, groupName, consumerName, handler } = consumerInfo; - let retryCount = 0; - - while (consumerInfo.isRunning && this.isRunning) { - 
try { - await this.claimPendingMessages(streamKey, groupName, consumerName, handler); - - const messages = await this.redis.xreadgroup( - 'GROUP', groupName, consumerName, - 'COUNT', 10, - 'BLOCK', 1000, - 'STREAMS', streamKey, '>' - ); - - if (!messages || messages.length === 0) { - retryCount = 0; - continue; - } - - for (const [stream, msgs] of messages as [string, [string, string[]][]][]) { - for (const [msgId, fields] of msgs) { - await this.processStreamMessage(msgId, fields, streamKey, groupName, handler); - } - } - - retryCount = 0; - } catch (error: any) { - retryCount++; - - if (error.message.includes('NOGROUP')) { - this.logger.warn(`Consumer group deleted, recreating: ${groupName}`); - try { - await this.redis.xgroup('CREATE', streamKey, groupName, '$', 'MKSTREAM'); - retryCount = 0; - } catch (createError) { - this.logger.error('Failed to recreate consumer group:', { error: createError }); - } - } else { - this.logger.error('Error reading from stream:', { error, retryCount }); - } - - if (retryCount >= this.maxRetries) { - this.logger.error(`Max retries reached for consumer ${consumerName}, stopping`); - consumerInfo.isRunning = false; - break; - } - - const backoffDelay = Math.min(this.retryDelay * Math.pow(2, retryCount - 1), 30000); - await this.sleep(backoffDelay); - } - } - - this.logger.info(`Stream consumer stopped: ${consumerName}`); - } - - private async processStreamMessage( - msgId: string, - fields: string[], - streamKey: string, - groupName: string, - handler: EventHandler - ): Promise { - let retryCount = 0; - - while (retryCount < this.maxRetries) { - try { - const message = this.parseStreamMessage(fields); - - if (message.source === this.serviceName) { - await this.redis.xack(streamKey, groupName, msgId); - return; - } - - await handler(message); - await this.redis.xack(streamKey, groupName, msgId); - - this.logger.debug(`Processed stream message: ${msgId}`, { - eventType: message.type, - source: message.source - }); - - return; - - } 
catch (error) { - retryCount++; - this.logger.error(`Error processing stream message ${msgId} (attempt ${retryCount}):`, error); - - if (retryCount >= this.maxRetries) { - await this.moveToDeadLetterQueue(msgId, fields, streamKey, groupName, error); - return; - } - - await this.sleep(this.retryDelay * retryCount); - } - } - } - - private async claimPendingMessages( - streamKey: string, - groupName: string, - consumerName: string, - handler: EventHandler - ): Promise { - try { - const pendingMessages = await this.redis.xpending( - streamKey, - groupName, - '-', - '+', - 10 - ) as any[]; - - if (!pendingMessages || pendingMessages.length === 0) { - return; - } - - const oldMessages = pendingMessages.filter((msg: any[]) => { - return msg[2] > 60000; - }); - - if (oldMessages.length === 0) { - return; - } - - const messageIds = oldMessages.map((msg: any[]) => msg[0]); - const claimedMessages = await this.redis.xclaim( - streamKey, - groupName, - consumerName, - 60000, - ...messageIds - ) as [string, string[]][]; - - for (const [msgId, fields] of claimedMessages) { - await this.processStreamMessage(msgId, fields, streamKey, groupName, handler); - } - - this.logger.debug(`Claimed and processed ${claimedMessages.length} pending messages`); - } catch (error) { - this.logger.error('Error claiming pending messages:', error); - } - } - - private async moveToDeadLetterQueue( - msgId: string, - fields: string[], - streamKey: string, - groupName: string, - error: any - ): Promise { - try { - const dlqKey = `${streamKey}:dlq`; - const message = this.parseStreamMessage(fields); - - await this.redis.xadd( - dlqKey, - '*', - 'original_id', msgId, - 'original_stream', streamKey, - 'error', (error as Error).message || 'Unknown error', - 'timestamp', Date.now().toString(), - 'id', message.id, - 'type', message.type, - 'source', message.source, - 'data', JSON.stringify(message.data), - 'metadata', JSON.stringify(message.metadata || {}) - ); - - await this.redis.xack(streamKey, 
groupName, msgId); - - this.logger.warn(`Moved message ${msgId} to dead letter queue: ${dlqKey}`, { error: (error as Error).message }); - } catch (dlqError) { - this.logger.error(`Failed to move message ${msgId} to dead letter queue:`, { error: dlqError }); - } - } - - private parseStreamMessage(fields: string[]): EventBusMessage { - const fieldMap: Record = {}; - - for (let i = 0; i < fields.length; i += 2) { - fieldMap[fields[i]] = fields[i + 1]; - } - - return { - id: fieldMap.id, - type: fieldMap.type || 'unknown', - source: fieldMap.source, - timestamp: parseInt(fieldMap.timestamp) || Date.now(), - data: fieldMap.data ? JSON.parse(fieldMap.data) : {}, - metadata: fieldMap.metadata ? JSON.parse(fieldMap.metadata) : {}, - }; - } - - private sleep(ms: number): Promise { - return new Promise(resolve => setTimeout(resolve, ms)); - } - - async unsubscribe(eventType: string, handler?: EventHandler): Promise { - if (handler) { - this.off(eventType, handler); - } else { - this.removeAllListeners(eventType); - } - - if (this.enablePersistence) { - try { - if (this.useStreams) { - const consumersToStop = Array.from(this.consumers.entries()) - .filter(([key]) => key.startsWith(`${eventType}-`)); - - for (const [key, consumerInfo] of consumersToStop) { - consumerInfo.isRunning = false; - this.consumers.delete(key); - } - - this.logger.debug(`Stopped stream consumers for: ${eventType}`); - } else { - if (this.subscriber) { - await this.subscriber.unsubscribe(`events:${eventType}`); - this.logger.debug(`Unsubscribed from event: ${eventType}`); - } - } - } catch (error) { - this.logger.error(`Failed to unsubscribe from event: ${eventType}`, error); - } - } - } - - async close(): Promise { - this.isRunning = false; - - for (const consumerInfo of this.consumers.values()) { - consumerInfo.isRunning = false; - } - this.consumers.clear(); - - if (this.redis) { - await this.redis.quit(); - } - if (this.subscriber) { - await this.subscriber.quit(); - } - - this.removeAllListeners(); 
- this.logger.info('Event bus closed'); - } - - private generateId(): string { - return `${this.serviceName}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`; - } - - async getStreamInfo(eventType: string): Promise { - if (!this.useStreams) { - throw new Error('Stream info only available when using Redis Streams'); - } - - const streamKey = `events:${eventType}`; - try { - return await this.redis.xinfo('STREAM', streamKey); - } catch (error) { - this.logger.error(`Failed to get stream info for: ${eventType}`, error); - throw error; - } - } - - async getStreamLength(eventType: string): Promise { - if (!this.useStreams) { - throw new Error('Stream length only available when using Redis Streams'); - } - - const streamKey = `events:${eventType}`; - try { - return await this.redis.xlen(streamKey); - } catch (error) { - this.logger.error(`Failed to get stream length for: ${eventType}`, error); - return 0; - } - } - async readStreamHistory( - eventType: string, - startId: string = '-', - endId: string = '+', - count?: number - ): Promise { - if (!this.useStreams) { - throw new Error('Stream history only available when using Redis Streams'); - } - - const streamKey = `events:${eventType}`; - try { - let messages: [string, string[]][]; - - if (count) { - messages = await this.redis.xrange(streamKey, startId, endId, 'COUNT', count) as [string, string[]][]; - } else { - messages = await this.redis.xrange(streamKey, startId, endId) as [string, string[]][]; - } - - return messages.map(([id, fields]) => ({ - ...this.parseStreamMessage(fields), - id - })); - } catch (error) { - this.logger.error(`Failed to read stream history for: ${eventType}`, error); - return []; - } - } - - async trimStream(eventType: string, maxLength: number): Promise { - if (!this.useStreams) { - throw new Error('Stream trimming only available when using Redis Streams'); - } - - const streamKey = `events:${eventType}`; - try { - return await this.redis.xtrim(streamKey, 'MAXLEN', '~', maxLength); - 
} catch (error) { - this.logger.error(`Failed to trim stream: ${eventType}`, error); - return 0; - } - } - - async replayEventsFromTimestamp( - eventType: string, - fromTimestamp: number, - handler: EventHandler, - speed: number = 1 - ): Promise { - if (!this.useStreams) { - throw new Error('Event replay only available when using Redis Streams'); - } - - const events = await this.readStreamHistory(eventType); - const filteredEvents = events.filter(event => event.timestamp >= fromTimestamp); - - this.logger.info(`Replaying ${filteredEvents.length} events from ${new Date(fromTimestamp)}`); - - for (let i = 0; i < filteredEvents.length; i++) { - const event = filteredEvents[i]; - const nextEvent = filteredEvents[i + 1]; - - try { - await handler(event); - - if (nextEvent && speed > 0) { - const delay = (nextEvent.timestamp - event.timestamp) / speed; - if (delay > 0) { - await this.sleep(Math.min(delay, 1000)); - } - } - } catch (error) { - this.logger.error(`Error replaying event: ${event.id}`, error); - } - } - - this.logger.info('Event replay completed'); - } -} - -export function createEventBus(options: EventBusOptions): EventBus { - return new EventBus(options); -} +import { EventEmitter } from 'eventemitter3'; +import Redis from 'ioredis'; +import { getLogger } from '@stock-bot/logger'; +import { dragonflyConfig } from '@stock-bot/config'; + +export interface EventBusMessage { + id: string; + type: string; + source: string; + timestamp: number; + data: any; + metadata?: Record; +} + +export interface EventHandler { + (message: EventBusMessage & { data: T }): Promise | void; +} + +export interface EventBusOptions { + serviceName: string; + enablePersistence?: boolean; + useStreams?: boolean; + maxRetries?: number; + retryDelay?: number; +} + +export interface StreamConsumerInfo { + streamKey: string; + groupName: string; + consumerName: string; + handler: EventHandler; + isRunning: boolean; +} + +export class EventBus extends EventEmitter { + private redis: 
Redis; + private subscriber?: Redis; + private serviceName: string; + private logger: any; + private enablePersistence: boolean; + private useStreams: boolean; + private maxRetries: number; + private retryDelay: number; + private consumers: Map = new Map(); + private isRunning: boolean = true; + + constructor(options: EventBusOptions) { + super(); + this.serviceName = options.serviceName; + this.enablePersistence = options.enablePersistence ?? true; + this.useStreams = options.useStreams ?? true; + this.maxRetries = options.maxRetries ?? 3; + this.retryDelay = options.retryDelay ?? 1000; + this.logger = getLogger(`event-bus:${this.serviceName}`); + + this.redis = new Redis({ + host: dragonflyConfig.DRAGONFLY_HOST, + port: dragonflyConfig.DRAGONFLY_PORT, + password: dragonflyConfig.DRAGONFLY_PASSWORD, + db: dragonflyConfig.DRAGONFLY_DATABASE || 0, + maxRetriesPerRequest: dragonflyConfig.DRAGONFLY_MAX_RETRIES, + lazyConnect: true, + }); + + if (!this.useStreams) { + this.subscriber = new Redis({ + host: dragonflyConfig.DRAGONFLY_HOST, + port: dragonflyConfig.DRAGONFLY_PORT, + password: dragonflyConfig.DRAGONFLY_PASSWORD, + db: dragonflyConfig.DRAGONFLY_DATABASE || 0, + }); + this.subscriber.on('message', this.handleRedisMessage.bind(this)); + } + + this.logger.info(`Redis event bus initialized (mode: ${this.useStreams ? 
'streams' : 'pub/sub'})`); + } + + private handleRedisMessage(channel: string, message: string) { + try { + const eventMessage: EventBusMessage = JSON.parse(message); + + if (eventMessage.source === this.serviceName) { + return; + } + + this.emit(eventMessage.type, eventMessage); + this.logger.debug(`Received event: ${eventMessage.type} from ${eventMessage.source}`); + } catch (error) { + this.logger.error('Failed to parse Redis message', { error, message }); + } + } + + async publish(type: string, data: T, metadata?: Record): Promise { + const message: EventBusMessage = { + id: this.generateId(), + type, + source: this.serviceName, + timestamp: Date.now(), + data, + metadata, + }; + + this.emit(type, message); + + if (this.redis && this.enablePersistence) { + try { + if (this.useStreams) { + const streamKey = `events:${type}`; + const messageId = await this.redis.xadd( + streamKey, + '*', + 'id', message.id, + 'type', message.type, + 'source', message.source, + 'timestamp', message.timestamp.toString(), + 'data', JSON.stringify(message.data), + 'metadata', JSON.stringify(message.metadata || {}) + ); + + this.logger.debug(`Published event to stream: ${type}`, { + messageId, + streamId: messageId + }); + return messageId as string; + } else { + await this.redis.publish(`events:${type}`, JSON.stringify(message)); + this.logger.debug(`Published event via pub/sub: ${type}`, { messageId: message.id }); + return message.id; + } + } catch (error) { + this.logger.error(`Failed to publish event: ${type}`, { error, messageId: message.id }); + throw error; + } + } + + return null; + } + + async subscribe(eventType: string, handler: EventHandler): Promise { + this.on(eventType, handler); + + if (this.redis && this.enablePersistence) { + try { + if (this.useStreams) { + await this.subscribeToStream(eventType, handler); + } else { + if (this.subscriber) { + await this.subscriber.subscribe(`events:${eventType}`); + this.logger.debug(`Subscribed to event: ${eventType}`); + } + } + 
} catch (error) { + this.logger.error(`Failed to subscribe to event: ${eventType}`, error); + throw error; + } + } + } + + private async subscribeToStream(eventType: string, handler: EventHandler): Promise { + const streamKey = `events:${eventType}`; + const groupName = `${eventType}-consumers`; + const consumerName = `${this.serviceName}-${Date.now()}`; + + try { + await this.redis.xgroup('CREATE', streamKey, groupName, '$', 'MKSTREAM'); + this.logger.debug(`Created consumer group: ${groupName} for stream: ${streamKey}`); + } catch (error: any) { + if (error.message.includes('BUSYGROUP')) { + this.logger.debug(`Consumer group already exists: ${groupName}`); + } else { + throw error; + } + } + + const consumerInfo: StreamConsumerInfo = { + streamKey, + groupName, + consumerName, + handler, + isRunning: true, + }; + + this.consumers.set(`${eventType}-${consumerName}`, consumerInfo); + this.startStreamConsumer(consumerInfo); + this.logger.debug(`Started stream consumer for: ${eventType}`); + } + + private async startStreamConsumer(consumerInfo: StreamConsumerInfo): Promise { + const { streamKey, groupName, consumerName, handler } = consumerInfo; + let retryCount = 0; + + while (consumerInfo.isRunning && this.isRunning) { + try { + await this.claimPendingMessages(streamKey, groupName, consumerName, handler); + + const messages = await this.redis.xreadgroup( + 'GROUP', groupName, consumerName, + 'COUNT', 10, + 'BLOCK', 1000, + 'STREAMS', streamKey, '>' + ); + + if (!messages || messages.length === 0) { + retryCount = 0; + continue; + } + + for (const [stream, msgs] of messages as [string, [string, string[]][]][]) { + for (const [msgId, fields] of msgs) { + await this.processStreamMessage(msgId, fields, streamKey, groupName, handler); + } + } + + retryCount = 0; + } catch (error: any) { + retryCount++; + + if (error.message.includes('NOGROUP')) { + this.logger.warn(`Consumer group deleted, recreating: ${groupName}`); + try { + await this.redis.xgroup('CREATE', 
streamKey, groupName, '$', 'MKSTREAM'); + retryCount = 0; + } catch (createError) { + this.logger.error('Failed to recreate consumer group:', { error: createError }); + } + } else { + this.logger.error('Error reading from stream:', { error, retryCount }); + } + + if (retryCount >= this.maxRetries) { + this.logger.error(`Max retries reached for consumer ${consumerName}, stopping`); + consumerInfo.isRunning = false; + break; + } + + const backoffDelay = Math.min(this.retryDelay * Math.pow(2, retryCount - 1), 30000); + await this.sleep(backoffDelay); + } + } + + this.logger.info(`Stream consumer stopped: ${consumerName}`); + } + + private async processStreamMessage( + msgId: string, + fields: string[], + streamKey: string, + groupName: string, + handler: EventHandler + ): Promise { + let retryCount = 0; + + while (retryCount < this.maxRetries) { + try { + const message = this.parseStreamMessage(fields); + + if (message.source === this.serviceName) { + await this.redis.xack(streamKey, groupName, msgId); + return; + } + + await handler(message); + await this.redis.xack(streamKey, groupName, msgId); + + this.logger.debug(`Processed stream message: ${msgId}`, { + eventType: message.type, + source: message.source + }); + + return; + + } catch (error) { + retryCount++; + this.logger.error(`Error processing stream message ${msgId} (attempt ${retryCount}):`, error); + + if (retryCount >= this.maxRetries) { + await this.moveToDeadLetterQueue(msgId, fields, streamKey, groupName, error); + return; + } + + await this.sleep(this.retryDelay * retryCount); + } + } + } + + private async claimPendingMessages( + streamKey: string, + groupName: string, + consumerName: string, + handler: EventHandler + ): Promise { + try { + const pendingMessages = await this.redis.xpending( + streamKey, + groupName, + '-', + '+', + 10 + ) as any[]; + + if (!pendingMessages || pendingMessages.length === 0) { + return; + } + + const oldMessages = pendingMessages.filter((msg: any[]) => { + return msg[2] > 
60000; + }); + + if (oldMessages.length === 0) { + return; + } + + const messageIds = oldMessages.map((msg: any[]) => msg[0]); + const claimedMessages = await this.redis.xclaim( + streamKey, + groupName, + consumerName, + 60000, + ...messageIds + ) as [string, string[]][]; + + for (const [msgId, fields] of claimedMessages) { + await this.processStreamMessage(msgId, fields, streamKey, groupName, handler); + } + + this.logger.debug(`Claimed and processed ${claimedMessages.length} pending messages`); + } catch (error) { + this.logger.error('Error claiming pending messages:', error); + } + } + + private async moveToDeadLetterQueue( + msgId: string, + fields: string[], + streamKey: string, + groupName: string, + error: any + ): Promise { + try { + const dlqKey = `${streamKey}:dlq`; + const message = this.parseStreamMessage(fields); + + await this.redis.xadd( + dlqKey, + '*', + 'original_id', msgId, + 'original_stream', streamKey, + 'error', (error as Error).message || 'Unknown error', + 'timestamp', Date.now().toString(), + 'id', message.id, + 'type', message.type, + 'source', message.source, + 'data', JSON.stringify(message.data), + 'metadata', JSON.stringify(message.metadata || {}) + ); + + await this.redis.xack(streamKey, groupName, msgId); + + this.logger.warn(`Moved message ${msgId} to dead letter queue: ${dlqKey}`, { error: (error as Error).message }); + } catch (dlqError) { + this.logger.error(`Failed to move message ${msgId} to dead letter queue:`, { error: dlqError }); + } + } + + private parseStreamMessage(fields: string[]): EventBusMessage { + const fieldMap: Record = {}; + + for (let i = 0; i < fields.length; i += 2) { + fieldMap[fields[i]] = fields[i + 1]; + } + + return { + id: fieldMap.id, + type: fieldMap.type || 'unknown', + source: fieldMap.source, + timestamp: parseInt(fieldMap.timestamp) || Date.now(), + data: fieldMap.data ? JSON.parse(fieldMap.data) : {}, + metadata: fieldMap.metadata ? 
JSON.parse(fieldMap.metadata) : {}, + }; + } + + private sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)); + } + + async unsubscribe(eventType: string, handler?: EventHandler): Promise { + if (handler) { + this.off(eventType, handler); + } else { + this.removeAllListeners(eventType); + } + + if (this.enablePersistence) { + try { + if (this.useStreams) { + const consumersToStop = Array.from(this.consumers.entries()) + .filter(([key]) => key.startsWith(`${eventType}-`)); + + for (const [key, consumerInfo] of consumersToStop) { + consumerInfo.isRunning = false; + this.consumers.delete(key); + } + + this.logger.debug(`Stopped stream consumers for: ${eventType}`); + } else { + if (this.subscriber) { + await this.subscriber.unsubscribe(`events:${eventType}`); + this.logger.debug(`Unsubscribed from event: ${eventType}`); + } + } + } catch (error) { + this.logger.error(`Failed to unsubscribe from event: ${eventType}`, error); + } + } + } + + async close(): Promise { + this.isRunning = false; + + for (const consumerInfo of this.consumers.values()) { + consumerInfo.isRunning = false; + } + this.consumers.clear(); + + if (this.redis) { + await this.redis.quit(); + } + if (this.subscriber) { + await this.subscriber.quit(); + } + + this.removeAllListeners(); + this.logger.info('Event bus closed'); + } + + private generateId(): string { + return `${this.serviceName}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`; + } + + async getStreamInfo(eventType: string): Promise { + if (!this.useStreams) { + throw new Error('Stream info only available when using Redis Streams'); + } + + const streamKey = `events:${eventType}`; + try { + return await this.redis.xinfo('STREAM', streamKey); + } catch (error) { + this.logger.error(`Failed to get stream info for: ${eventType}`, error); + throw error; + } + } + + async getStreamLength(eventType: string): Promise { + if (!this.useStreams) { + throw new Error('Stream length only available when using 
Redis Streams'); + } + + const streamKey = `events:${eventType}`; + try { + return await this.redis.xlen(streamKey); + } catch (error) { + this.logger.error(`Failed to get stream length for: ${eventType}`, error); + return 0; + } + } + async readStreamHistory( + eventType: string, + startId: string = '-', + endId: string = '+', + count?: number + ): Promise { + if (!this.useStreams) { + throw new Error('Stream history only available when using Redis Streams'); + } + + const streamKey = `events:${eventType}`; + try { + let messages: [string, string[]][]; + + if (count) { + messages = await this.redis.xrange(streamKey, startId, endId, 'COUNT', count) as [string, string[]][]; + } else { + messages = await this.redis.xrange(streamKey, startId, endId) as [string, string[]][]; + } + + return messages.map(([id, fields]) => ({ + ...this.parseStreamMessage(fields), + id + })); + } catch (error) { + this.logger.error(`Failed to read stream history for: ${eventType}`, error); + return []; + } + } + + async trimStream(eventType: string, maxLength: number): Promise { + if (!this.useStreams) { + throw new Error('Stream trimming only available when using Redis Streams'); + } + + const streamKey = `events:${eventType}`; + try { + return await this.redis.xtrim(streamKey, 'MAXLEN', '~', maxLength); + } catch (error) { + this.logger.error(`Failed to trim stream: ${eventType}`, error); + return 0; + } + } + + async replayEventsFromTimestamp( + eventType: string, + fromTimestamp: number, + handler: EventHandler, + speed: number = 1 + ): Promise { + if (!this.useStreams) { + throw new Error('Event replay only available when using Redis Streams'); + } + + const events = await this.readStreamHistory(eventType); + const filteredEvents = events.filter(event => event.timestamp >= fromTimestamp); + + this.logger.info(`Replaying ${filteredEvents.length} events from ${new Date(fromTimestamp)}`); + + for (let i = 0; i < filteredEvents.length; i++) { + const event = filteredEvents[i]; + const 
nextEvent = filteredEvents[i + 1]; + + try { + await handler(event); + + if (nextEvent && speed > 0) { + const delay = (nextEvent.timestamp - event.timestamp) / speed; + if (delay > 0) { + await this.sleep(Math.min(delay, 1000)); + } + } + } catch (error) { + this.logger.error(`Error replaying event: ${event.id}`, error); + } + } + + this.logger.info('Event replay completed'); + } +} + +export function createEventBus(options: EventBusOptions): EventBus { + return new EventBus(options); +} diff --git a/libs/event-bus/tsconfig.json b/libs/event-bus/tsconfig.json index 3030b42..e8f78e0 100644 --- a/libs/event-bus/tsconfig.json +++ b/libs/event-bus/tsconfig.json @@ -1,13 +1,13 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "references": [ - { "path": "../types" }, - { "path": "../config" }, - { "path": "../logger" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../config" }, + { "path": "../logger" } + ] +} diff --git a/libs/event-bus/turbo.json b/libs/event-bus/turbo.json index 7632db9..c630cca 100644 --- a/libs/event-bus/turbo.json +++ b/libs/event-bus/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/http/README.md b/libs/http/README.md 
index fa8b1a9..678bf15 100644 --- a/libs/http/README.md +++ b/libs/http/README.md @@ -1,283 +1,283 @@ -# HTTP Client Library - -A comprehensive HTTP client library for the Stock Bot platform with built-in support for: - -- βœ… **Fetch API** - Modern, promise-based HTTP requests -- βœ… **Proxy Support** - HTTP, HTTPS, SOCKS4, and SOCKS5 proxies -- βœ… **Rate Limiting** - Configurable request rate limiting -- βœ… **Timeout Handling** - Request timeouts with abort controllers -- βœ… **Retry Logic** - Automatic retries with exponential backoff -- βœ… **TypeScript** - Full TypeScript support with type safety -- βœ… **Logging Integration** - Optional logger integration - -## Installation - -```bash -bun add @stock-bot/http -``` - -## Basic Usage - -```typescript -import { HttpClient } from '@stock-bot/http'; - -// Create a client with default configuration -const client = new HttpClient(); - -// Make a GET request -const response = await client.get('https://api.example.com/data'); -console.log(response.data); - -// Make a POST request -const postResponse = await client.post('https://api.example.com/users', { - name: 'John Doe', - email: 'john@example.com' -}); -``` - -## Advanced Configuration - -```typescript -import { HttpClient } from '@stock-bot/http'; -import { logger } from '@stock-bot/logger'; - -const client = new HttpClient({ - baseURL: 'https://api.example.com', - timeout: 10000, // 10 seconds - retries: 3, - retryDelay: 1000, // 1 second base delay - defaultHeaders: { - 'Authorization': 'Bearer token', - 'User-Agent': 'Stock-Bot/1.0' - }, - validateStatus: (status) => status < 400 -}, logger); -``` - -## Proxy Support - -### HTTP/HTTPS Proxy - -```typescript -const client = new HttpClient({ - proxy: { - type: 'http', - host: 'proxy.example.com', - port: 8080, - username: 'user', // optional - password: 'pass' // optional - } -}); -``` - -### SOCKS Proxy - -```typescript -const client = new HttpClient({ - proxy: { - type: 'socks5', - host: 
'socks-proxy.example.com', - port: 1080, - username: 'user', // optional - password: 'pass' // optional - } -}); -``` - -## Rate Limiting - -```typescript -const client = new HttpClient({ - rateLimit: { - maxRequests: 100, // Max 100 requests - windowMs: 60 * 1000, // Per 1 minute - skipSuccessfulRequests: false, - skipFailedRequests: true // Don't count failed requests - } -}); - -// Check rate limit status -const status = client.getRateLimitStatus(); -console.log(`${status.currentCount}/${status.maxRequests} requests used`); -``` - -## Request Methods - -```typescript -// GET request -const getData = await client.get('/api/data'); - -// POST request with body -const postData = await client.post('/api/users', { - name: 'John', - email: 'john@example.com' -}); - -// PUT request -const putData = await client.put('/api/users/1', updatedUser); - -// DELETE request -const deleteData = await client.delete('/api/users/1'); - -// PATCH request -const patchData = await client.patch('/api/users/1', { name: 'Jane' }); - -// Custom request -const customResponse = await client.request({ - method: 'POST', - url: '/api/custom', - headers: { 'X-Custom': 'value' }, - body: { data: 'custom' }, - timeout: 5000 -}); -``` - -## Error Handling - -```typescript -import { HttpError, TimeoutError, RateLimitError } from '@stock-bot/http'; - -try { - const response = await client.get('/api/data'); -} catch (error) { - if (error instanceof TimeoutError) { - console.log('Request timed out'); - } else if (error instanceof RateLimitError) { - console.log(`Rate limited: retry after ${error.retryAfter}ms`); - } else if (error instanceof HttpError) { - console.log(`HTTP error ${error.status}: ${error.message}`); - } -} -``` - -## Retry Configuration - -```typescript -const client = new HttpClient({ - retries: 3, // Retry up to 3 times - retryDelay: 1000, // Base delay of 1 second - // Exponential backoff: 1s, 2s, 4s -}); - -// Or per-request retry configuration -const response = await 
client.get('/api/data', { - retries: 5, - retryDelay: 500 -}); -``` - -## Timeout Handling - -```typescript -// Global timeout -const client = new HttpClient({ - timeout: 30000 // 30 seconds -}); - -// Per-request timeout -const response = await client.get('/api/data', { - timeout: 5000 // 5 seconds for this request -}); -``` - -## Custom Status Validation - -```typescript -const client = new HttpClient({ - validateStatus: (status) => { - // Accept 2xx and 3xx status codes - return status >= 200 && status < 400; - } -}); - -// Or per-request validation -const response = await client.get('/api/data', { - validateStatus: (status) => status === 200 || status === 404 -}); -``` - -## TypeScript Support - -The library is fully typed with TypeScript: - -```typescript -interface User { - id: number; - name: string; - email: string; -} - -// Response data is properly typed -const response = await client.get('/api/users'); -const users: User[] = response.data; - -// Request configuration is validated -const config: RequestConfig = { - method: 'POST', - url: '/api/users', - body: { name: 'John' }, - timeout: 5000 -}; -``` - -## Integration with Logger - -```typescript -import { logger } from '@stock-bot/logger'; -import { HttpClient } from '@stock-bot/http'; - -const client = new HttpClient({ - baseURL: 'https://api.example.com' -}, logger); - -// All requests will be logged with debug/warn/error levels -``` - -## Testing - -```bash -# Run tests -bun test - -# Run with coverage -bun test --coverage - -# Watch mode -bun test --watch -``` - -## Features - -### Proxy Support -- HTTP and HTTPS proxies -- SOCKS4 and SOCKS5 proxies -- Authentication support -- Automatic agent creation - -### Rate Limiting -- Token bucket algorithm -- Configurable window and request limits -- Skip successful/failed requests options -- Real-time status monitoring - -### Retry Logic -- Exponential backoff -- Configurable retry attempts -- Smart retry conditions (5xx errors only) -- Per-request retry 
override - -### Error Handling -- Typed error classes -- Detailed error information -- Request/response context -- Timeout detection - -### Performance -- Built on modern Fetch API -- Minimal dependencies -- Tree-shakeable exports -- TypeScript optimization - -## License - -MIT License - see LICENSE file for details. +# HTTP Client Library + +A comprehensive HTTP client library for the Stock Bot platform with built-in support for: + +- βœ… **Fetch API** - Modern, promise-based HTTP requests +- βœ… **Proxy Support** - HTTP, HTTPS, SOCKS4, and SOCKS5 proxies +- βœ… **Rate Limiting** - Configurable request rate limiting +- βœ… **Timeout Handling** - Request timeouts with abort controllers +- βœ… **Retry Logic** - Automatic retries with exponential backoff +- βœ… **TypeScript** - Full TypeScript support with type safety +- βœ… **Logging Integration** - Optional logger integration + +## Installation + +```bash +bun add @stock-bot/http +``` + +## Basic Usage + +```typescript +import { HttpClient } from '@stock-bot/http'; + +// Create a client with default configuration +const client = new HttpClient(); + +// Make a GET request +const response = await client.get('https://api.example.com/data'); +console.log(response.data); + +// Make a POST request +const postResponse = await client.post('https://api.example.com/users', { + name: 'John Doe', + email: 'john@example.com' +}); +``` + +## Advanced Configuration + +```typescript +import { HttpClient } from '@stock-bot/http'; +import { logger } from '@stock-bot/logger'; + +const client = new HttpClient({ + baseURL: 'https://api.example.com', + timeout: 10000, // 10 seconds + retries: 3, + retryDelay: 1000, // 1 second base delay + defaultHeaders: { + 'Authorization': 'Bearer token', + 'User-Agent': 'Stock-Bot/1.0' + }, + validateStatus: (status) => status < 400 +}, logger); +``` + +## Proxy Support + +### HTTP/HTTPS Proxy + +```typescript +const client = new HttpClient({ + proxy: { + type: 'http', + host: 'proxy.example.com', + 
port: 8080, + username: 'user', // optional + password: 'pass' // optional + } +}); +``` + +### SOCKS Proxy + +```typescript +const client = new HttpClient({ + proxy: { + type: 'socks5', + host: 'socks-proxy.example.com', + port: 1080, + username: 'user', // optional + password: 'pass' // optional + } +}); +``` + +## Rate Limiting + +```typescript +const client = new HttpClient({ + rateLimit: { + maxRequests: 100, // Max 100 requests + windowMs: 60 * 1000, // Per 1 minute + skipSuccessfulRequests: false, + skipFailedRequests: true // Don't count failed requests + } +}); + +// Check rate limit status +const status = client.getRateLimitStatus(); +console.log(`${status.currentCount}/${status.maxRequests} requests used`); +``` + +## Request Methods + +```typescript +// GET request +const getData = await client.get('/api/data'); + +// POST request with body +const postData = await client.post('/api/users', { + name: 'John', + email: 'john@example.com' +}); + +// PUT request +const putData = await client.put('/api/users/1', updatedUser); + +// DELETE request +const deleteData = await client.delete('/api/users/1'); + +// PATCH request +const patchData = await client.patch('/api/users/1', { name: 'Jane' }); + +// Custom request +const customResponse = await client.request({ + method: 'POST', + url: '/api/custom', + headers: { 'X-Custom': 'value' }, + body: { data: 'custom' }, + timeout: 5000 +}); +``` + +## Error Handling + +```typescript +import { HttpError, TimeoutError, RateLimitError } from '@stock-bot/http'; + +try { + const response = await client.get('/api/data'); +} catch (error) { + if (error instanceof TimeoutError) { + console.log('Request timed out'); + } else if (error instanceof RateLimitError) { + console.log(`Rate limited: retry after ${error.retryAfter}ms`); + } else if (error instanceof HttpError) { + console.log(`HTTP error ${error.status}: ${error.message}`); + } +} +``` + +## Retry Configuration + +```typescript +const client = new HttpClient({ + 
retries: 3, // Retry up to 3 times + retryDelay: 1000, // Base delay of 1 second + // Exponential backoff: 1s, 2s, 4s +}); + +// Or per-request retry configuration +const response = await client.get('/api/data', { + retries: 5, + retryDelay: 500 +}); +``` + +## Timeout Handling + +```typescript +// Global timeout +const client = new HttpClient({ + timeout: 30000 // 30 seconds +}); + +// Per-request timeout +const response = await client.get('/api/data', { + timeout: 5000 // 5 seconds for this request +}); +``` + +## Custom Status Validation + +```typescript +const client = new HttpClient({ + validateStatus: (status) => { + // Accept 2xx and 3xx status codes + return status >= 200 && status < 400; + } +}); + +// Or per-request validation +const response = await client.get('/api/data', { + validateStatus: (status) => status === 200 || status === 404 +}); +``` + +## TypeScript Support + +The library is fully typed with TypeScript: + +```typescript +interface User { + id: number; + name: string; + email: string; +} + +// Response data is properly typed +const response = await client.get('/api/users'); +const users: User[] = response.data; + +// Request configuration is validated +const config: RequestConfig = { + method: 'POST', + url: '/api/users', + body: { name: 'John' }, + timeout: 5000 +}; +``` + +## Integration with Logger + +```typescript +import { logger } from '@stock-bot/logger'; +import { HttpClient } from '@stock-bot/http'; + +const client = new HttpClient({ + baseURL: 'https://api.example.com' +}, logger); + +// All requests will be logged with debug/warn/error levels +``` + +## Testing + +```bash +# Run tests +bun test + +# Run with coverage +bun test --coverage + +# Watch mode +bun test --watch +``` + +## Features + +### Proxy Support +- HTTP and HTTPS proxies +- SOCKS4 and SOCKS5 proxies +- Authentication support +- Automatic agent creation + +### Rate Limiting +- Token bucket algorithm +- Configurable window and request limits +- Skip successful/failed 
requests options +- Real-time status monitoring + +### Retry Logic +- Exponential backoff +- Configurable retry attempts +- Smart retry conditions (5xx errors only) +- Per-request retry override + +### Error Handling +- Typed error classes +- Detailed error information +- Request/response context +- Timeout detection + +### Performance +- Built on modern Fetch API +- Minimal dependencies +- Tree-shakeable exports +- TypeScript optimization + +## License + +MIT License - see LICENSE file for details. diff --git a/libs/http/package.json b/libs/http/package.json index f45ad11..3ebc824 100644 --- a/libs/http/package.json +++ b/libs/http/package.json @@ -1,44 +1,44 @@ -{ - "name": "@stock-bot/http", - "version": "1.0.0", - "description": "HTTP client library with proxy support, rate limiting, and timeout for Stock Bot platform", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "test": "bun test", - "test:watch": "bun test --watch", - "test:coverage": "bun test --coverage", - "lint": "eslint src/**/*.ts", - "type-check": "tsc --noEmit", - "clean": "rimraf dist" - }, - "dependencies": { - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "axios": "^1.9.0", - "http-proxy-agent": "^7.0.2", - "https-proxy-agent": "^7.0.6", - "socks-proxy-agent": "^8.0.5" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "typescript": "^5.3.0", - "eslint": "^8.56.0", - "@typescript-eslint/eslint-plugin": "^6.19.0", - "@typescript-eslint/parser": "^6.19.0", - "bun-types": "^1.2.15" - }, - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ] +{ + "name": "@stock-bot/http", + "version": "1.0.0", + "description": "HTTP client library with proxy support, rate limiting, and timeout for Stock Bot platform", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": 
"tsc", + "test": "bun test", + "test:watch": "bun test --watch", + "test:coverage": "bun test --coverage", + "lint": "eslint src/**/*.ts", + "type-check": "tsc --noEmit", + "clean": "rimraf dist" + }, + "dependencies": { + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "axios": "^1.9.0", + "http-proxy-agent": "^7.0.2", + "https-proxy-agent": "^7.0.6", + "socks-proxy-agent": "^8.0.5" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "typescript": "^5.3.0", + "eslint": "^8.56.0", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": "^6.19.0", + "bun-types": "^1.2.15" + }, + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist", + "README.md" + ] } \ No newline at end of file diff --git a/libs/http/src/adapters/axios-adapter.ts b/libs/http/src/adapters/axios-adapter.ts index 7b35aa9..a9cab10 100644 --- a/libs/http/src/adapters/axios-adapter.ts +++ b/libs/http/src/adapters/axios-adapter.ts @@ -1,53 +1,53 @@ -import axios, { type AxiosRequestConfig, type AxiosResponse } from 'axios'; -import type { RequestConfig, HttpResponse } from '../types'; -import type { RequestAdapter } from './types'; -import { ProxyManager } from '../proxy-manager'; -import { HttpError } from '../types'; - -/** - * Axios adapter for SOCKS proxies - */ -export class AxiosAdapter implements RequestAdapter { - canHandle(config: RequestConfig): boolean { - // Axios handles SOCKS proxies - return Boolean(config.proxy && (config.proxy.protocol === 'socks4' || config.proxy.protocol === 'socks5')); - } - - async request(config: RequestConfig, signal: AbortSignal): Promise> { - const { url, method = 'GET', headers, data, proxy } = config; - - if (!proxy) { - throw new Error('Axios adapter requires proxy configuration'); - } - - // Create proxy configuration using ProxyManager - const axiosConfig: AxiosRequestConfig = { - ...ProxyManager.createAxiosConfig(proxy), - url, 
- method, - headers, - data, - signal, - // Don't throw on non-2xx status codes - let caller handle - validateStatus: () => true, - }; const response: AxiosResponse = await axios(axiosConfig); - - const httpResponse: HttpResponse = { - data: response.data, - status: response.status, - headers: response.headers as Record, - ok: response.status >= 200 && response.status < 300, - }; - - // Throw HttpError for non-2xx status codes - if (!httpResponse.ok) { - throw new HttpError( - `Request failed with status ${response.status}`, - response.status, - httpResponse - ); - } - - return httpResponse; - } -} +import axios, { type AxiosRequestConfig, type AxiosResponse } from 'axios'; +import type { RequestConfig, HttpResponse } from '../types'; +import type { RequestAdapter } from './types'; +import { ProxyManager } from '../proxy-manager'; +import { HttpError } from '../types'; + +/** + * Axios adapter for SOCKS proxies + */ +export class AxiosAdapter implements RequestAdapter { + canHandle(config: RequestConfig): boolean { + // Axios handles SOCKS proxies + return Boolean(config.proxy && (config.proxy.protocol === 'socks4' || config.proxy.protocol === 'socks5')); + } + + async request(config: RequestConfig, signal: AbortSignal): Promise> { + const { url, method = 'GET', headers, data, proxy } = config; + + if (!proxy) { + throw new Error('Axios adapter requires proxy configuration'); + } + + // Create proxy configuration using ProxyManager + const axiosConfig: AxiosRequestConfig = { + ...ProxyManager.createAxiosConfig(proxy), + url, + method, + headers, + data, + signal, + // Don't throw on non-2xx status codes - let caller handle + validateStatus: () => true, + }; const response: AxiosResponse = await axios(axiosConfig); + + const httpResponse: HttpResponse = { + data: response.data, + status: response.status, + headers: response.headers as Record, + ok: response.status >= 200 && response.status < 300, + }; + + // Throw HttpError for non-2xx status codes + if 
(!httpResponse.ok) { + throw new HttpError( + `Request failed with status ${response.status}`, + response.status, + httpResponse + ); + } + + return httpResponse; + } +} diff --git a/libs/http/src/adapters/factory.ts b/libs/http/src/adapters/factory.ts index 1946d28..99d6577 100644 --- a/libs/http/src/adapters/factory.ts +++ b/libs/http/src/adapters/factory.ts @@ -1,28 +1,28 @@ -import type { RequestConfig } from '../types'; -import type { RequestAdapter } from './types'; -import { FetchAdapter } from './fetch-adapter'; -import { AxiosAdapter } from './axios-adapter'; - -/** - * Factory for creating the appropriate request adapter - */ -export class AdapterFactory { - private static adapters: RequestAdapter[] = [ - new AxiosAdapter(), // Check SOCKS first - new FetchAdapter(), // Fallback to fetch for everything else - ]; - - /** - * Get the appropriate adapter for the given configuration - */ - static getAdapter(config: RequestConfig): RequestAdapter { - for (const adapter of this.adapters) { - if (adapter.canHandle(config)) { - return adapter; - } - } - - // Fallback to fetch adapter - return new FetchAdapter(); - } -} +import type { RequestConfig } from '../types'; +import type { RequestAdapter } from './types'; +import { FetchAdapter } from './fetch-adapter'; +import { AxiosAdapter } from './axios-adapter'; + +/** + * Factory for creating the appropriate request adapter + */ +export class AdapterFactory { + private static adapters: RequestAdapter[] = [ + new AxiosAdapter(), // Check SOCKS first + new FetchAdapter(), // Fallback to fetch for everything else + ]; + + /** + * Get the appropriate adapter for the given configuration + */ + static getAdapter(config: RequestConfig): RequestAdapter { + for (const adapter of this.adapters) { + if (adapter.canHandle(config)) { + return adapter; + } + } + + // Fallback to fetch adapter + return new FetchAdapter(); + } +} diff --git a/libs/http/src/adapters/fetch-adapter.ts b/libs/http/src/adapters/fetch-adapter.ts index 
d9c4e4b..c7de6b8 100644 --- a/libs/http/src/adapters/fetch-adapter.ts +++ b/libs/http/src/adapters/fetch-adapter.ts @@ -1,66 +1,66 @@ -import type { RequestConfig, HttpResponse } from '../types'; -import type { RequestAdapter } from './types'; -import { ProxyManager } from '../proxy-manager'; -import { HttpError } from '../types'; - -/** - * Fetch adapter for HTTP/HTTPS proxies and non-proxy requests - */ -export class FetchAdapter implements RequestAdapter { - canHandle(config: RequestConfig): boolean { - // Fetch handles non-proxy requests and HTTP/HTTPS proxies - return !config.proxy || config.proxy.protocol === 'http' || config.proxy.protocol === 'https'; - } - - async request(config: RequestConfig, signal: AbortSignal): Promise> { - const { url, method = 'GET', headers, data, proxy } = config; - - // Prepare fetch options - const fetchOptions: RequestInit = { - method, - headers, - signal, - }; - - // Add body for non-GET requests - if (data && method !== 'GET') { - fetchOptions.body = typeof data === 'string' ? 
data : JSON.stringify(data); - if (typeof data === 'object') { - fetchOptions.headers = { 'Content-Type': 'application/json', ...fetchOptions.headers }; - } - } - - // Add proxy if needed (using Bun's built-in proxy support) - if (proxy) { - (fetchOptions as any).proxy = ProxyManager.createProxyUrl(proxy); - } const response = await fetch(url, fetchOptions); - - // Parse response based on content type - let responseData: T; - const contentType = response.headers.get('content-type') || ''; - - if (contentType.includes('application/json')) { - responseData = await response.json() as T; - } else { - responseData = await response.text() as T; - } - - const httpResponse: HttpResponse = { - data: responseData, - status: response.status, - headers: Object.fromEntries(response.headers.entries()), - ok: response.ok, - }; - - // Throw HttpError for non-2xx status codes - if (!response.ok) { - throw new HttpError( - `Request failed with status ${response.status}`, - response.status, - httpResponse - ); - } - - return httpResponse; - } -} +import type { RequestConfig, HttpResponse } from '../types'; +import type { RequestAdapter } from './types'; +import { ProxyManager } from '../proxy-manager'; +import { HttpError } from '../types'; + +/** + * Fetch adapter for HTTP/HTTPS proxies and non-proxy requests + */ +export class FetchAdapter implements RequestAdapter { + canHandle(config: RequestConfig): boolean { + // Fetch handles non-proxy requests and HTTP/HTTPS proxies + return !config.proxy || config.proxy.protocol === 'http' || config.proxy.protocol === 'https'; + } + + async request(config: RequestConfig, signal: AbortSignal): Promise> { + const { url, method = 'GET', headers, data, proxy } = config; + + // Prepare fetch options + const fetchOptions: RequestInit = { + method, + headers, + signal, + }; + + // Add body for non-GET requests + if (data && method !== 'GET') { + fetchOptions.body = typeof data === 'string' ? 
data : JSON.stringify(data); + if (typeof data === 'object') { + fetchOptions.headers = { 'Content-Type': 'application/json', ...fetchOptions.headers }; + } + } + + // Add proxy if needed (using Bun's built-in proxy support) + if (proxy) { + (fetchOptions as any).proxy = ProxyManager.createProxyUrl(proxy); + } const response = await fetch(url, fetchOptions); + + // Parse response based on content type + let responseData: T; + const contentType = response.headers.get('content-type') || ''; + + if (contentType.includes('application/json')) { + responseData = await response.json() as T; + } else { + responseData = await response.text() as T; + } + + const httpResponse: HttpResponse = { + data: responseData, + status: response.status, + headers: Object.fromEntries(response.headers.entries()), + ok: response.ok, + }; + + // Throw HttpError for non-2xx status codes + if (!response.ok) { + throw new HttpError( + `Request failed with status ${response.status}`, + response.status, + httpResponse + ); + } + + return httpResponse; + } +} diff --git a/libs/http/src/adapters/index.ts b/libs/http/src/adapters/index.ts index b28aa12..c65cab0 100644 --- a/libs/http/src/adapters/index.ts +++ b/libs/http/src/adapters/index.ts @@ -1,4 +1,4 @@ -export * from './types'; -export * from './fetch-adapter'; -export * from './axios-adapter'; -export * from './factory'; +export * from './types'; +export * from './fetch-adapter'; +export * from './axios-adapter'; +export * from './factory'; diff --git a/libs/http/src/adapters/types.ts b/libs/http/src/adapters/types.ts index 28a7eed..46cc709 100644 --- a/libs/http/src/adapters/types.ts +++ b/libs/http/src/adapters/types.ts @@ -1,16 +1,16 @@ -import type { RequestConfig, HttpResponse } from '../types'; - -/** - * Request adapter interface for different HTTP implementations - */ -export interface RequestAdapter { - /** - * Execute an HTTP request - */ - request(config: RequestConfig, signal: AbortSignal): Promise>; - - /** - * Check if this 
adapter can handle the given configuration - */ - canHandle(config: RequestConfig): boolean; -} +import type { RequestConfig, HttpResponse } from '../types'; + +/** + * Request adapter interface for different HTTP implementations + */ +export interface RequestAdapter { + /** + * Execute an HTTP request + */ + request(config: RequestConfig, signal: AbortSignal): Promise>; + + /** + * Check if this adapter can handle the given configuration + */ + canHandle(config: RequestConfig): boolean; +} diff --git a/libs/http/src/client.ts b/libs/http/src/client.ts index 37ee6b4..f88013e 100644 --- a/libs/http/src/client.ts +++ b/libs/http/src/client.ts @@ -1,155 +1,155 @@ -import type { Logger } from '@stock-bot/logger'; -import type { - HttpClientConfig, - RequestConfig, - HttpResponse, -} from './types'; -import { HttpError } from './types'; -import { ProxyManager } from './proxy-manager'; -import { AdapterFactory } from './adapters/index'; - -export class HttpClient { - private readonly config: HttpClientConfig; - private readonly logger?: Logger; - - constructor(config: HttpClientConfig = {}, logger?: Logger) { - this.config = config; - this.logger = logger?.child('http-client'); - } - - // Convenience methods - async get(url: string, config: Omit = {}): Promise> { - return this.request({ ...config, method: 'GET', url }); - } - - async post(url: string, data?: any, config: Omit = {}): Promise> { - return this.request({ ...config, method: 'POST', url, data }); - } - - async put(url: string, data?: any, config: Omit = {}): Promise> { - return this.request({ ...config, method: 'PUT', url, data }); - } - - async del(url: string, config: Omit = {}): Promise> { - return this.request({ ...config, method: 'DELETE', url }); - } - - async patch(url: string, data?: any, config: Omit = {}): Promise> { - return this.request({ ...config, method: 'PATCH', url, data }); - } - - /** - * Main request method - clean and simple - */ - async request(config: RequestConfig): Promise> { - const 
finalConfig = this.mergeConfig(config); - const startTime = Date.now(); - - this.logger?.debug('Making HTTP request', { - method: finalConfig.method, - url: finalConfig.url, - hasProxy: !!finalConfig.proxy - }); - - try { - const response = await this.executeRequest(finalConfig); - response.responseTime = Date.now() - startTime; - - this.logger?.debug('HTTP request successful', { - method: finalConfig.method, - url: finalConfig.url, - status: response.status, - responseTime: response.responseTime, - }); - - return response; - } catch (error) { - if( this.logger?.getServiceName() === 'proxy-service' ) { - this.logger?.debug('HTTP request failed', { - method: finalConfig.method, - url: finalConfig.url, - error: (error as Error).message, - }); - }else{ - this.logger?.warn('HTTP request failed', { - method: finalConfig.method, - url: finalConfig.url, - error: (error as Error).message, - }); - } - throw error; - } - } - - /** - * Execute request with timeout handling - no race conditions - */ private async executeRequest(config: RequestConfig): Promise> { - const timeout = config.timeout ?? this.config.timeout ?? 
30000; - const controller = new AbortController(); - const startTime = Date.now(); - let timeoutId: NodeJS.Timeout | undefined; - - // Set up timeout - // Create a timeout promise that will reject - const timeoutPromise = new Promise((_, reject) => { - timeoutId = setTimeout(() => { - const elapsed = Date.now() - startTime; - this.logger?.debug('Request timeout triggered', { - url: config.url, - method: config.method, - timeout, - elapsed - }); - - // Attempt to abort (may or may not work with Bun) - controller.abort(); - - // Force rejection regardless of signal behavior - reject(new HttpError(`Request timeout after ${timeout}ms (elapsed: ${elapsed}ms)`)); - }, timeout); - }); - - try { - // Get the appropriate adapter - const adapter = AdapterFactory.getAdapter(config); - - const response = await Promise.race([ - adapter.request(config, controller.signal), - timeoutPromise - ]); - - this.logger?.debug('Adapter request successful', { url: config.url, elapsedMs: Date.now() - startTime }); - // Clear timeout on success - clearTimeout(timeoutId); - - return response; - } catch (error) { - const elapsed = Date.now() - startTime; - this.logger?.debug('Adapter failed successful', { url: config.url, elapsedMs: Date.now() - startTime }); - clearTimeout(timeoutId); - - // Handle timeout - if (controller.signal.aborted) { - throw new HttpError(`Request timeout after ${timeout}ms`); - } - - // Re-throw other errors - if (error instanceof HttpError) { - throw error; - } - - throw new HttpError(`Request failed: ${(error as Error).message}`); - } - } - - /** - * Merge configs with defaults - */ - private mergeConfig(config: RequestConfig): RequestConfig { - return { - ...config, - headers: { ...this.config.headers, ...config.headers }, - timeout: config.timeout ?? 
this.config.timeout, - }; - } -} +import type { Logger } from '@stock-bot/logger'; +import type { + HttpClientConfig, + RequestConfig, + HttpResponse, +} from './types'; +import { HttpError } from './types'; +import { ProxyManager } from './proxy-manager'; +import { AdapterFactory } from './adapters/index'; + +export class HttpClient { + private readonly config: HttpClientConfig; + private readonly logger?: Logger; + + constructor(config: HttpClientConfig = {}, logger?: Logger) { + this.config = config; + this.logger = logger?.child('http-client'); + } + + // Convenience methods + async get(url: string, config: Omit = {}): Promise> { + return this.request({ ...config, method: 'GET', url }); + } + + async post(url: string, data?: any, config: Omit = {}): Promise> { + return this.request({ ...config, method: 'POST', url, data }); + } + + async put(url: string, data?: any, config: Omit = {}): Promise> { + return this.request({ ...config, method: 'PUT', url, data }); + } + + async del(url: string, config: Omit = {}): Promise> { + return this.request({ ...config, method: 'DELETE', url }); + } + + async patch(url: string, data?: any, config: Omit = {}): Promise> { + return this.request({ ...config, method: 'PATCH', url, data }); + } + + /** + * Main request method - clean and simple + */ + async request(config: RequestConfig): Promise> { + const finalConfig = this.mergeConfig(config); + const startTime = Date.now(); + + this.logger?.debug('Making HTTP request', { + method: finalConfig.method, + url: finalConfig.url, + hasProxy: !!finalConfig.proxy + }); + + try { + const response = await this.executeRequest(finalConfig); + response.responseTime = Date.now() - startTime; + + this.logger?.debug('HTTP request successful', { + method: finalConfig.method, + url: finalConfig.url, + status: response.status, + responseTime: response.responseTime, + }); + + return response; + } catch (error) { + if( this.logger?.getServiceName() === 'proxy-service' ) { + this.logger?.debug('HTTP 
request failed', { + method: finalConfig.method, + url: finalConfig.url, + error: (error as Error).message, + }); + }else{ + this.logger?.warn('HTTP request failed', { + method: finalConfig.method, + url: finalConfig.url, + error: (error as Error).message, + }); + } + throw error; + } + } + + /** + * Execute request with timeout handling - no race conditions + */ private async executeRequest(config: RequestConfig): Promise> { + const timeout = config.timeout ?? this.config.timeout ?? 30000; + const controller = new AbortController(); + const startTime = Date.now(); + let timeoutId: NodeJS.Timeout | undefined; + + // Set up timeout + // Create a timeout promise that will reject + const timeoutPromise = new Promise((_, reject) => { + timeoutId = setTimeout(() => { + const elapsed = Date.now() - startTime; + this.logger?.debug('Request timeout triggered', { + url: config.url, + method: config.method, + timeout, + elapsed + }); + + // Attempt to abort (may or may not work with Bun) + controller.abort(); + + // Force rejection regardless of signal behavior + reject(new HttpError(`Request timeout after ${timeout}ms (elapsed: ${elapsed}ms)`)); + }, timeout); + }); + + try { + // Get the appropriate adapter + const adapter = AdapterFactory.getAdapter(config); + + const response = await Promise.race([ + adapter.request(config, controller.signal), + timeoutPromise + ]); + + this.logger?.debug('Adapter request successful', { url: config.url, elapsedMs: Date.now() - startTime }); + // Clear timeout on success + clearTimeout(timeoutId); + + return response; + } catch (error) { + const elapsed = Date.now() - startTime; + this.logger?.debug('Adapter failed successful', { url: config.url, elapsedMs: Date.now() - startTime }); + clearTimeout(timeoutId); + + // Handle timeout + if (controller.signal.aborted) { + throw new HttpError(`Request timeout after ${timeout}ms`); + } + + // Re-throw other errors + if (error instanceof HttpError) { + throw error; + } + + throw new 
HttpError(`Request failed: ${(error as Error).message}`); + } + } + + /** + * Merge configs with defaults + */ + private mergeConfig(config: RequestConfig): RequestConfig { + return { + ...config, + headers: { ...this.config.headers, ...config.headers }, + timeout: config.timeout ?? this.config.timeout, + }; + } +} diff --git a/libs/http/src/index.ts b/libs/http/src/index.ts index a70ad6e..2515d55 100644 --- a/libs/http/src/index.ts +++ b/libs/http/src/index.ts @@ -1,8 +1,8 @@ -// Re-export all types and classes -export * from './types'; -export * from './client'; -export * from './proxy-manager'; -export * from './adapters/index'; - -// Default export -export { HttpClient as default } from './client'; +// Re-export all types and classes +export * from './types'; +export * from './client'; +export * from './proxy-manager'; +export * from './adapters/index'; + +// Default export +export { HttpClient as default } from './client'; diff --git a/libs/http/src/proxy-manager.ts b/libs/http/src/proxy-manager.ts index e9248ad..1e92afc 100644 --- a/libs/http/src/proxy-manager.ts +++ b/libs/http/src/proxy-manager.ts @@ -1,66 +1,66 @@ -import axios, { AxiosRequestConfig, type AxiosInstance } from 'axios'; -import { SocksProxyAgent } from 'socks-proxy-agent'; -import { HttpsProxyAgent } from 'https-proxy-agent'; -import { HttpProxyAgent } from 'http-proxy-agent'; -import type { ProxyInfo } from './types'; - -export class ProxyManager { - /** - * Determine if we should use Bun fetch (HTTP/HTTPS) or Axios (SOCKS) - */ - static shouldUseBunFetch(proxy: ProxyInfo): boolean { - return proxy.protocol === 'http' || proxy.protocol === 'https'; - } - /** - * Create proxy URL for both Bun fetch and Axios proxy agents - */ - static createProxyUrl(proxy: ProxyInfo): string { - const { protocol, host, port, username, password } = proxy; - if (username && password) { - return `${protocol}://${encodeURIComponent(username)}:${encodeURIComponent(password)}@${host}:${port}`; - } - return 
`${protocol}://${host}:${port}`; - } - - /** - * Create appropriate agent for Axios based on proxy type - */ - static createProxyAgent(proxy: ProxyInfo) { - this.validateConfig(proxy); - - const proxyUrl = this.createProxyUrl(proxy); - switch (proxy.protocol) { - case 'socks4': - case 'socks5': - // console.log(`Using SOCKS proxy: ${proxyUrl}`); - return new SocksProxyAgent(proxyUrl); - case 'http': - return new HttpProxyAgent(proxyUrl); - case 'https': - return new HttpsProxyAgent(proxyUrl); - default: - throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`); - } - } - /** - * Create Axios instance with proxy configuration - */ - static createAxiosConfig(proxy: ProxyInfo): AxiosRequestConfig { - const agent = this.createProxyAgent(proxy); - return { - httpAgent: agent, - httpsAgent: agent, - }; - } - /** - * Simple proxy config validation - */ - static validateConfig(proxy: ProxyInfo): void { - if (!proxy.host || !proxy.port) { - throw new Error('Proxy host and port are required'); - } - if (!['http', 'https', 'socks4', 'socks5'].includes(proxy.protocol)) { - throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`); - } - } -} +import axios, { AxiosRequestConfig, type AxiosInstance } from 'axios'; +import { SocksProxyAgent } from 'socks-proxy-agent'; +import { HttpsProxyAgent } from 'https-proxy-agent'; +import { HttpProxyAgent } from 'http-proxy-agent'; +import type { ProxyInfo } from './types'; + +export class ProxyManager { + /** + * Determine if we should use Bun fetch (HTTP/HTTPS) or Axios (SOCKS) + */ + static shouldUseBunFetch(proxy: ProxyInfo): boolean { + return proxy.protocol === 'http' || proxy.protocol === 'https'; + } + /** + * Create proxy URL for both Bun fetch and Axios proxy agents + */ + static createProxyUrl(proxy: ProxyInfo): string { + const { protocol, host, port, username, password } = proxy; + if (username && password) { + return 
`${protocol}://${encodeURIComponent(username)}:${encodeURIComponent(password)}@${host}:${port}`; + } + return `${protocol}://${host}:${port}`; + } + + /** + * Create appropriate agent for Axios based on proxy type + */ + static createProxyAgent(proxy: ProxyInfo) { + this.validateConfig(proxy); + + const proxyUrl = this.createProxyUrl(proxy); + switch (proxy.protocol) { + case 'socks4': + case 'socks5': + // console.log(`Using SOCKS proxy: ${proxyUrl}`); + return new SocksProxyAgent(proxyUrl); + case 'http': + return new HttpProxyAgent(proxyUrl); + case 'https': + return new HttpsProxyAgent(proxyUrl); + default: + throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`); + } + } + /** + * Create Axios instance with proxy configuration + */ + static createAxiosConfig(proxy: ProxyInfo): AxiosRequestConfig { + const agent = this.createProxyAgent(proxy); + return { + httpAgent: agent, + httpsAgent: agent, + }; + } + /** + * Simple proxy config validation + */ + static validateConfig(proxy: ProxyInfo): void { + if (!proxy.host || !proxy.port) { + throw new Error('Proxy host and port are required'); + } + if (!['http', 'https', 'socks4', 'socks5'].includes(proxy.protocol)) { + throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`); + } + } +} diff --git a/libs/http/src/types.ts b/libs/http/src/types.ts index c7a1b83..1f173a9 100644 --- a/libs/http/src/types.ts +++ b/libs/http/src/types.ts @@ -1,49 +1,49 @@ -// Minimal types for fast HTTP client -export type HttpMethod = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH'; - -export interface ProxyInfo { - source?: string; - protocol: 'http' | 'https' | 'socks4' | 'socks5'; - host: string; - port: number; - username?: string; - password?: string; - url?: string; // Full proxy URL for adapters - isWorking?: boolean; - responseTime?: number; - error?: string; - checkedAt?: Date; -} - -export interface HttpClientConfig { - timeout?: number; - headers?: Record; -} - -export interface RequestConfig { - method?: 
HttpMethod; - url: string; - headers?: Record; - data?: any; // Changed from 'body' to 'data' for consistency - timeout?: number; - proxy?: ProxyInfo; -} - -export interface HttpResponse { - data: T; - status: number; - headers: Record; - ok: boolean; - responseTime?: number; -} - -export class HttpError extends Error { - constructor( - message: string, - public status?: number, - public response?: HttpResponse - ) { - super(message); - this.name = 'HttpError'; - } -} +// Minimal types for fast HTTP client +export type HttpMethod = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH'; + +export interface ProxyInfo { + source?: string; + protocol: 'http' | 'https' | 'socks4' | 'socks5'; + host: string; + port: number; + username?: string; + password?: string; + url?: string; // Full proxy URL for adapters + isWorking?: boolean; + responseTime?: number; + error?: string; + checkedAt?: Date; +} + +export interface HttpClientConfig { + timeout?: number; + headers?: Record; +} + +export interface RequestConfig { + method?: HttpMethod; + url: string; + headers?: Record; + data?: any; // Changed from 'body' to 'data' for consistency + timeout?: number; + proxy?: ProxyInfo; +} + +export interface HttpResponse { + data: T; + status: number; + headers: Record; + ok: boolean; + responseTime?: number; +} + +export class HttpError extends Error { + constructor( + message: string, + public status?: number, + public response?: HttpResponse + ) { + super(message); + this.name = 'HttpError'; + } +} diff --git a/libs/http/test/http-integration.test.ts b/libs/http/test/http-integration.test.ts index e49a31a..c2367f2 100644 --- a/libs/http/test/http-integration.test.ts +++ b/libs/http/test/http-integration.test.ts @@ -1,154 +1,154 @@ -import { describe, test, expect, beforeAll, afterAll } from 'bun:test'; -import { HttpClient, HttpError } from '../src/index'; -import { MockServer } from './mock-server'; - -/** - * Integration tests for HTTP client with real network scenarios - * These tests 
use external services and may be affected by network conditions - */ - -let mockServer: MockServer; -let mockServerBaseUrl: string; - -beforeAll(async () => { - mockServer = new MockServer(); - await mockServer.start(); - mockServerBaseUrl = mockServer.getBaseUrl(); -}); - -afterAll(async () => { - await mockServer.stop(); -}); - -describe('HTTP Integration Tests', () => { - let client: HttpClient; - - beforeAll(() => { - client = new HttpClient({ - timeout: 10000 - }); - }); - - describe('Real-world scenarios', () => { - test('should handle JSON API responses', async () => { - try { - const response = await client.get('https://jsonplaceholder.typicode.com/posts/1'); - - expect(response.status).toBe(200); - expect(response.data).toHaveProperty('id'); - expect(response.data).toHaveProperty('title'); - expect(response.data).toHaveProperty('body'); - } catch (error) { - console.warn('External API test skipped due to network issues:', (error as Error).message); - } - }); - - test('should handle large responses', async () => { - try { - const response = await client.get('https://jsonplaceholder.typicode.com/posts'); - - expect(response.status).toBe(200); - expect(Array.isArray(response.data)).toBe(true); - expect(response.data.length).toBeGreaterThan(0); - } catch (error) { - console.warn('Large response test skipped due to network issues:', (error as Error).message); - } - }); - - test('should handle POST with JSON data', async () => { - try { - const postData = { - title: 'Integration Test Post', - body: 'This is a test post from integration tests', - userId: 1 - }; - - const response = await client.post('https://jsonplaceholder.typicode.com/posts', postData); - - expect(response.status).toBe(201); - expect(response.data).toHaveProperty('id'); - expect(response.data.title).toBe(postData.title); - } catch (error) { - console.warn('POST integration test skipped due to network issues:', (error as Error).message); - } - }); - }); - - describe('Error scenarios with mock 
server', () => { test('should handle various HTTP status codes', async () => { - const successCodes = [200, 201]; - const errorCodes = [400, 401, 403, 404, 500, 503]; - - // Test success codes - for (const statusCode of successCodes) { - const response = await client.get(`${mockServerBaseUrl}/status/${statusCode}`); - expect(response.status).toBe(statusCode); - } - - // Test error codes (should throw HttpError) - for (const statusCode of errorCodes) { - await expect( - client.get(`${mockServerBaseUrl}/status/${statusCode}`) - ).rejects.toThrow(HttpError); - } - }); - - test('should handle malformed responses gracefully', async () => { - // Mock server returns valid JSON, so this test verifies our client handles it properly - const response = await client.get(`${mockServerBaseUrl}/`); - expect(response.status).toBe(200); - expect(typeof response.data).toBe('object'); - }); - - test('should handle concurrent requests', async () => { - const requests = Array.from({ length: 5 }, (_, i) => - client.get(`${mockServerBaseUrl}/`, { - headers: { 'X-Request-ID': `req-${i}` } - }) - ); - - const responses = await Promise.all(requests); - - responses.forEach((response, index) => { - expect(response.status).toBe(200); - expect(response.data.headers).toHaveProperty('x-request-id', `req-${index}`); - }); - }); - }); - - describe('Performance and reliability', () => { - test('should handle rapid sequential requests', async () => { - const startTime = Date.now(); - const requests = []; - - for (let i = 0; i < 10; i++) { - requests.push(client.get(`${mockServerBaseUrl}/`)); - } - - const responses = await Promise.all(requests); - const endTime = Date.now(); - - expect(responses).toHaveLength(10); - responses.forEach(response => { - expect(response.status).toBe(200); - }); - - console.log(`Completed 10 requests in ${endTime - startTime}ms`); - }); - - test('should maintain connection efficiency', async () => { - const clientWithKeepAlive = new HttpClient({ - timeout: 5000 - }); - - 
const requests = Array.from({ length: 3 }, () => - clientWithKeepAlive.get(`${mockServerBaseUrl}/`) - ); - - const responses = await Promise.all(requests); - - responses.forEach(response => { - expect(response.status).toBe(200); - }); - }); - }); -}); +import { describe, test, expect, beforeAll, afterAll } from 'bun:test'; +import { HttpClient, HttpError } from '../src/index'; +import { MockServer } from './mock-server'; + +/** + * Integration tests for HTTP client with real network scenarios + * These tests use external services and may be affected by network conditions + */ + +let mockServer: MockServer; +let mockServerBaseUrl: string; + +beforeAll(async () => { + mockServer = new MockServer(); + await mockServer.start(); + mockServerBaseUrl = mockServer.getBaseUrl(); +}); + +afterAll(async () => { + await mockServer.stop(); +}); + +describe('HTTP Integration Tests', () => { + let client: HttpClient; + + beforeAll(() => { + client = new HttpClient({ + timeout: 10000 + }); + }); + + describe('Real-world scenarios', () => { + test('should handle JSON API responses', async () => { + try { + const response = await client.get('https://jsonplaceholder.typicode.com/posts/1'); + + expect(response.status).toBe(200); + expect(response.data).toHaveProperty('id'); + expect(response.data).toHaveProperty('title'); + expect(response.data).toHaveProperty('body'); + } catch (error) { + console.warn('External API test skipped due to network issues:', (error as Error).message); + } + }); + + test('should handle large responses', async () => { + try { + const response = await client.get('https://jsonplaceholder.typicode.com/posts'); + + expect(response.status).toBe(200); + expect(Array.isArray(response.data)).toBe(true); + expect(response.data.length).toBeGreaterThan(0); + } catch (error) { + console.warn('Large response test skipped due to network issues:', (error as Error).message); + } + }); + + test('should handle POST with JSON data', async () => { + try { + const postData = { 
+ title: 'Integration Test Post', + body: 'This is a test post from integration tests', + userId: 1 + }; + + const response = await client.post('https://jsonplaceholder.typicode.com/posts', postData); + + expect(response.status).toBe(201); + expect(response.data).toHaveProperty('id'); + expect(response.data.title).toBe(postData.title); + } catch (error) { + console.warn('POST integration test skipped due to network issues:', (error as Error).message); + } + }); + }); + + describe('Error scenarios with mock server', () => { test('should handle various HTTP status codes', async () => { + const successCodes = [200, 201]; + const errorCodes = [400, 401, 403, 404, 500, 503]; + + // Test success codes + for (const statusCode of successCodes) { + const response = await client.get(`${mockServerBaseUrl}/status/${statusCode}`); + expect(response.status).toBe(statusCode); + } + + // Test error codes (should throw HttpError) + for (const statusCode of errorCodes) { + await expect( + client.get(`${mockServerBaseUrl}/status/${statusCode}`) + ).rejects.toThrow(HttpError); + } + }); + + test('should handle malformed responses gracefully', async () => { + // Mock server returns valid JSON, so this test verifies our client handles it properly + const response = await client.get(`${mockServerBaseUrl}/`); + expect(response.status).toBe(200); + expect(typeof response.data).toBe('object'); + }); + + test('should handle concurrent requests', async () => { + const requests = Array.from({ length: 5 }, (_, i) => + client.get(`${mockServerBaseUrl}/`, { + headers: { 'X-Request-ID': `req-${i}` } + }) + ); + + const responses = await Promise.all(requests); + + responses.forEach((response, index) => { + expect(response.status).toBe(200); + expect(response.data.headers).toHaveProperty('x-request-id', `req-${index}`); + }); + }); + }); + + describe('Performance and reliability', () => { + test('should handle rapid sequential requests', async () => { + const startTime = Date.now(); + const requests 
= []; + + for (let i = 0; i < 10; i++) { + requests.push(client.get(`${mockServerBaseUrl}/`)); + } + + const responses = await Promise.all(requests); + const endTime = Date.now(); + + expect(responses).toHaveLength(10); + responses.forEach(response => { + expect(response.status).toBe(200); + }); + + console.log(`Completed 10 requests in ${endTime - startTime}ms`); + }); + + test('should maintain connection efficiency', async () => { + const clientWithKeepAlive = new HttpClient({ + timeout: 5000 + }); + + const requests = Array.from({ length: 3 }, () => + clientWithKeepAlive.get(`${mockServerBaseUrl}/`) + ); + + const responses = await Promise.all(requests); + + responses.forEach(response => { + expect(response.status).toBe(200); + }); + }); + }); +}); diff --git a/libs/http/test/http.test.ts b/libs/http/test/http.test.ts index 60caf6a..7763c98 100644 --- a/libs/http/test/http.test.ts +++ b/libs/http/test/http.test.ts @@ -1,159 +1,159 @@ -import { describe, test, expect, beforeEach, beforeAll, afterAll } from 'bun:test'; -import { HttpClient, HttpError, ProxyManager } from '../src/index'; -import type { ProxyInfo } from '../src/types'; -import { MockServer } from './mock-server'; - -// Global mock server instance -let mockServer: MockServer; -let mockServerBaseUrl: string; - -beforeAll(async () => { - // Start mock server for all tests - mockServer = new MockServer(); - await mockServer.start(); - mockServerBaseUrl = mockServer.getBaseUrl(); -}); - -afterAll(async () => { - // Stop mock server - await mockServer.stop(); -}); - -describe('HttpClient', () => { - let client: HttpClient; - - beforeEach(() => { - client = new HttpClient(); - }); - - describe('Basic functionality', () => { - test('should create client with default config', () => { - expect(client).toBeInstanceOf(HttpClient); - }); - - test('should make GET request', async () => { - const response = await client.get(`${mockServerBaseUrl}/`); - - expect(response.status).toBe(200); - 
expect(response.data).toHaveProperty('url'); - expect(response.data).toHaveProperty('method', 'GET'); - }); - - test('should make POST request with body', async () => { - const testData = { - title: 'Test Post', - body: 'Test body', - userId: 1, - }; - - const response = await client.post(`${mockServerBaseUrl}/post`, testData); - - expect(response.status).toBe(200); - expect(response.data).toHaveProperty('data'); - expect(response.data.data).toEqual(testData); - }); - - test('should handle custom headers', async () => { - const customHeaders = { - 'X-Custom-Header': 'test-value', - 'User-Agent': 'StockBot-HTTP-Client/1.0' - }; - - const response = await client.get(`${mockServerBaseUrl}/headers`, { - headers: customHeaders - }); - - expect(response.status).toBe(200); - expect(response.data.headers).toHaveProperty('x-custom-header', 'test-value'); - expect(response.data.headers).toHaveProperty('user-agent', 'StockBot-HTTP-Client/1.0'); - }); - - test('should handle timeout', async () => { - const clientWithTimeout = new HttpClient({ timeout: 1 }); // 1ms timeout - - await expect( - clientWithTimeout.get('https://httpbin.org/delay/1') - ).rejects.toThrow(); - }); - }); - describe('Error handling', () => { - test('should handle HTTP errors', async () => { - await expect( - client.get(`${mockServerBaseUrl}/status/404`) - ).rejects.toThrow(HttpError); - }); - - test('should handle network errors gracefully', async () => { - await expect( - client.get('https://nonexistent-domain-that-will-fail-12345.test') - ).rejects.toThrow(); - }); - - test('should handle invalid URLs', async () => { - await expect( - client.get('not:/a:valid/url') - ).rejects.toThrow(); - }); - }); - - describe('HTTP methods', () => { - test('should make PUT request', async () => { - const testData = { id: 1, name: 'Updated' }; - const response = await client.put(`${mockServerBaseUrl}/post`, testData); - expect(response.status).toBe(200); - }); - - test('should make DELETE request', async () => { - 
const response = await client.del(`${mockServerBaseUrl}/`); - expect(response.status).toBe(200); - expect(response.data.method).toBe('DELETE'); - }); - - test('should make PATCH request', async () => { - const testData = { name: 'Patched' }; - const response = await client.patch(`${mockServerBaseUrl}/post`, testData); - expect(response.status).toBe(200); - }); - }); -}); - -describe('ProxyManager', () => { - test('should determine when to use Bun fetch', () => { - const httpProxy: ProxyInfo = { - protocol: 'http', - host: 'proxy.example.com', - port: 8080 - }; - - const socksProxy: ProxyInfo = { - protocol: 'socks5', - host: 'proxy.example.com', - port: 1080 - }; - - expect(ProxyManager.shouldUseBunFetch(httpProxy)).toBe(true); - expect(ProxyManager.shouldUseBunFetch(socksProxy)).toBe(false); - }); - - test('should create proxy URL for Bun fetch', () => { - const proxy: ProxyInfo = { - protocol: 'http', - host: 'proxy.example.com', - port: 8080, - username: 'user', - password: 'pass' }; - - const proxyUrl = ProxyManager.createProxyUrl(proxy); - expect(proxyUrl).toBe('http://user:pass@proxy.example.com:8080'); - }); - - test('should create proxy URL without credentials', () => { - const proxy: ProxyInfo = { - protocol: 'https', - host: 'proxy.example.com', - port: 8080 }; - - const proxyUrl = ProxyManager.createProxyUrl(proxy); - expect(proxyUrl).toBe('https://proxy.example.com:8080'); - }); -}); +import { describe, test, expect, beforeEach, beforeAll, afterAll } from 'bun:test'; +import { HttpClient, HttpError, ProxyManager } from '../src/index'; +import type { ProxyInfo } from '../src/types'; +import { MockServer } from './mock-server'; + +// Global mock server instance +let mockServer: MockServer; +let mockServerBaseUrl: string; + +beforeAll(async () => { + // Start mock server for all tests + mockServer = new MockServer(); + await mockServer.start(); + mockServerBaseUrl = mockServer.getBaseUrl(); +}); + +afterAll(async () => { + // Stop mock server + await 
mockServer.stop(); +}); + +describe('HttpClient', () => { + let client: HttpClient; + + beforeEach(() => { + client = new HttpClient(); + }); + + describe('Basic functionality', () => { + test('should create client with default config', () => { + expect(client).toBeInstanceOf(HttpClient); + }); + + test('should make GET request', async () => { + const response = await client.get(`${mockServerBaseUrl}/`); + + expect(response.status).toBe(200); + expect(response.data).toHaveProperty('url'); + expect(response.data).toHaveProperty('method', 'GET'); + }); + + test('should make POST request with body', async () => { + const testData = { + title: 'Test Post', + body: 'Test body', + userId: 1, + }; + + const response = await client.post(`${mockServerBaseUrl}/post`, testData); + + expect(response.status).toBe(200); + expect(response.data).toHaveProperty('data'); + expect(response.data.data).toEqual(testData); + }); + + test('should handle custom headers', async () => { + const customHeaders = { + 'X-Custom-Header': 'test-value', + 'User-Agent': 'StockBot-HTTP-Client/1.0' + }; + + const response = await client.get(`${mockServerBaseUrl}/headers`, { + headers: customHeaders + }); + + expect(response.status).toBe(200); + expect(response.data.headers).toHaveProperty('x-custom-header', 'test-value'); + expect(response.data.headers).toHaveProperty('user-agent', 'StockBot-HTTP-Client/1.0'); + }); + + test('should handle timeout', async () => { + const clientWithTimeout = new HttpClient({ timeout: 1 }); // 1ms timeout + + await expect( + clientWithTimeout.get('https://httpbin.org/delay/1') + ).rejects.toThrow(); + }); + }); + describe('Error handling', () => { + test('should handle HTTP errors', async () => { + await expect( + client.get(`${mockServerBaseUrl}/status/404`) + ).rejects.toThrow(HttpError); + }); + + test('should handle network errors gracefully', async () => { + await expect( + client.get('https://nonexistent-domain-that-will-fail-12345.test') + ).rejects.toThrow(); + 
}); + + test('should handle invalid URLs', async () => { + await expect( + client.get('not:/a:valid/url') + ).rejects.toThrow(); + }); + }); + + describe('HTTP methods', () => { + test('should make PUT request', async () => { + const testData = { id: 1, name: 'Updated' }; + const response = await client.put(`${mockServerBaseUrl}/post`, testData); + expect(response.status).toBe(200); + }); + + test('should make DELETE request', async () => { + const response = await client.del(`${mockServerBaseUrl}/`); + expect(response.status).toBe(200); + expect(response.data.method).toBe('DELETE'); + }); + + test('should make PATCH request', async () => { + const testData = { name: 'Patched' }; + const response = await client.patch(`${mockServerBaseUrl}/post`, testData); + expect(response.status).toBe(200); + }); + }); +}); + +describe('ProxyManager', () => { + test('should determine when to use Bun fetch', () => { + const httpProxy: ProxyInfo = { + protocol: 'http', + host: 'proxy.example.com', + port: 8080 + }; + + const socksProxy: ProxyInfo = { + protocol: 'socks5', + host: 'proxy.example.com', + port: 1080 + }; + + expect(ProxyManager.shouldUseBunFetch(httpProxy)).toBe(true); + expect(ProxyManager.shouldUseBunFetch(socksProxy)).toBe(false); + }); + + test('should create proxy URL for Bun fetch', () => { + const proxy: ProxyInfo = { + protocol: 'http', + host: 'proxy.example.com', + port: 8080, + username: 'user', + password: 'pass' }; + + const proxyUrl = ProxyManager.createProxyUrl(proxy); + expect(proxyUrl).toBe('http://user:pass@proxy.example.com:8080'); + }); + + test('should create proxy URL without credentials', () => { + const proxy: ProxyInfo = { + protocol: 'https', + host: 'proxy.example.com', + port: 8080 }; + + const proxyUrl = ProxyManager.createProxyUrl(proxy); + expect(proxyUrl).toBe('https://proxy.example.com:8080'); + }); +}); diff --git a/libs/http/test/mock-server.test.ts b/libs/http/test/mock-server.test.ts index 9b56153..33d03da 100644 --- 
a/libs/http/test/mock-server.test.ts +++ b/libs/http/test/mock-server.test.ts @@ -1,131 +1,131 @@ -import { describe, test, expect, beforeAll, afterAll } from 'bun:test'; -import { MockServer } from './mock-server'; - -/** - * Tests for the MockServer utility - * Ensures our test infrastructure works correctly - */ - -describe('MockServer', () => { - let mockServer: MockServer; - let baseUrl: string; - - beforeAll(async () => { - mockServer = new MockServer(); - await mockServer.start(); - baseUrl = mockServer.getBaseUrl(); - }); - - afterAll(async () => { - await mockServer.stop(); - }); - - describe('Server lifecycle', () => { - test('should start and provide base URL', () => { - expect(baseUrl).toMatch(/^http:\/\/localhost:\d+$/); - expect(mockServer.getBaseUrl()).toBe(baseUrl); - }); - - test('should be reachable', async () => { - const response = await fetch(`${baseUrl}/`); - expect(response.ok).toBe(true); - }); - }); - - describe('Status endpoints', () => { - test('should return correct status codes', async () => { - const statusCodes = [200, 201, 400, 401, 403, 404, 500, 503]; - - for (const status of statusCodes) { - const response = await fetch(`${baseUrl}/status/${status}`); - expect(response.status).toBe(status); - } - }); - }); - - describe('Headers endpoint', () => { - test('should echo request headers', async () => { - const response = await fetch(`${baseUrl}/headers`, { - headers: { - 'X-Test-Header': 'test-value', - 'User-Agent': 'MockServer-Test' - } }); - - expect(response.ok).toBe(true); - const data = await response.json(); - expect(data.headers).toHaveProperty('x-test-header', 'test-value'); - expect(data.headers).toHaveProperty('user-agent', 'MockServer-Test'); - }); - }); - - describe('Basic auth endpoint', () => { - test('should authenticate valid credentials', async () => { - const username = 'testuser'; - const password = 'testpass'; - const credentials = btoa(`${username}:${password}`); - - const response = await 
fetch(`${baseUrl}/basic-auth/${username}/${password}`, { - headers: { - 'Authorization': `Basic ${credentials}` - } - }); - - expect(response.ok).toBe(true); - const data = await response.json(); - expect(data.authenticated).toBe(true); - expect(data.user).toBe(username); - }); - - test('should reject invalid credentials', async () => { - const credentials = btoa('wrong:credentials'); - - const response = await fetch(`${baseUrl}/basic-auth/user/pass`, { - headers: { - 'Authorization': `Basic ${credentials}` - } - }); - - expect(response.status).toBe(401); - }); - - test('should reject missing auth header', async () => { - const response = await fetch(`${baseUrl}/basic-auth/user/pass`); - expect(response.status).toBe(401); - }); - }); - - describe('POST endpoint', () => { - test('should echo POST data', async () => { - const testData = { - message: 'Hello, MockServer!', - timestamp: Date.now() - }; - - const response = await fetch(`${baseUrl}/post`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify(testData) - }); - - expect(response.ok).toBe(true); - const data = await response.json(); - expect(data.data).toEqual(testData); - expect(data.method).toBe('POST'); - expect(data.headers).toHaveProperty('content-type', 'application/json'); - }); - }); - - describe('Default endpoint', () => { - test('should return request information', async () => { - const response = await fetch(`${baseUrl}/unknown-endpoint`); - - expect(response.ok).toBe(true); - const data = await response.json(); - expect(data.url).toBe(`${baseUrl}/unknown-endpoint`); - expect(data.method).toBe('GET'); - expect(data.headers).toBeDefined(); - }); - }); -}); +import { describe, test, expect, beforeAll, afterAll } from 'bun:test'; +import { MockServer } from './mock-server'; + +/** + * Tests for the MockServer utility + * Ensures our test infrastructure works correctly + */ + +describe('MockServer', () => { + let mockServer: MockServer; + let baseUrl: 
string; + + beforeAll(async () => { + mockServer = new MockServer(); + await mockServer.start(); + baseUrl = mockServer.getBaseUrl(); + }); + + afterAll(async () => { + await mockServer.stop(); + }); + + describe('Server lifecycle', () => { + test('should start and provide base URL', () => { + expect(baseUrl).toMatch(/^http:\/\/localhost:\d+$/); + expect(mockServer.getBaseUrl()).toBe(baseUrl); + }); + + test('should be reachable', async () => { + const response = await fetch(`${baseUrl}/`); + expect(response.ok).toBe(true); + }); + }); + + describe('Status endpoints', () => { + test('should return correct status codes', async () => { + const statusCodes = [200, 201, 400, 401, 403, 404, 500, 503]; + + for (const status of statusCodes) { + const response = await fetch(`${baseUrl}/status/${status}`); + expect(response.status).toBe(status); + } + }); + }); + + describe('Headers endpoint', () => { + test('should echo request headers', async () => { + const response = await fetch(`${baseUrl}/headers`, { + headers: { + 'X-Test-Header': 'test-value', + 'User-Agent': 'MockServer-Test' + } }); + + expect(response.ok).toBe(true); + const data = await response.json(); + expect(data.headers).toHaveProperty('x-test-header', 'test-value'); + expect(data.headers).toHaveProperty('user-agent', 'MockServer-Test'); + }); + }); + + describe('Basic auth endpoint', () => { + test('should authenticate valid credentials', async () => { + const username = 'testuser'; + const password = 'testpass'; + const credentials = btoa(`${username}:${password}`); + + const response = await fetch(`${baseUrl}/basic-auth/${username}/${password}`, { + headers: { + 'Authorization': `Basic ${credentials}` + } + }); + + expect(response.ok).toBe(true); + const data = await response.json(); + expect(data.authenticated).toBe(true); + expect(data.user).toBe(username); + }); + + test('should reject invalid credentials', async () => { + const credentials = btoa('wrong:credentials'); + + const response = await 
fetch(`${baseUrl}/basic-auth/user/pass`, { + headers: { + 'Authorization': `Basic ${credentials}` + } + }); + + expect(response.status).toBe(401); + }); + + test('should reject missing auth header', async () => { + const response = await fetch(`${baseUrl}/basic-auth/user/pass`); + expect(response.status).toBe(401); + }); + }); + + describe('POST endpoint', () => { + test('should echo POST data', async () => { + const testData = { + message: 'Hello, MockServer!', + timestamp: Date.now() + }; + + const response = await fetch(`${baseUrl}/post`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json' + }, + body: JSON.stringify(testData) + }); + + expect(response.ok).toBe(true); + const data = await response.json(); + expect(data.data).toEqual(testData); + expect(data.method).toBe('POST'); + expect(data.headers).toHaveProperty('content-type', 'application/json'); + }); + }); + + describe('Default endpoint', () => { + test('should return request information', async () => { + const response = await fetch(`${baseUrl}/unknown-endpoint`); + + expect(response.ok).toBe(true); + const data = await response.json(); + expect(data.url).toBe(`${baseUrl}/unknown-endpoint`); + expect(data.method).toBe('GET'); + expect(data.headers).toBeDefined(); + }); + }); +}); diff --git a/libs/http/test/mock-server.ts b/libs/http/test/mock-server.ts index e5c3d03..20b3c2c 100644 --- a/libs/http/test/mock-server.ts +++ b/libs/http/test/mock-server.ts @@ -1,114 +1,114 @@ -/** - * Mock HTTP server for testing the HTTP client - * Replaces external dependency on httpbin.org with a local server - */ -export class MockServer { - private server: ReturnType | null = null; - private port: number = 0; - - /** - * Start the mock server on a random port - */ - async start(): Promise { - this.server = Bun.serve({ - port: 1, // Use any available port - fetch: this.handleRequest.bind(this), - error: this.handleError.bind(this), - }); - - this.port = this.server.port || 1; - console.log(`Mock 
server started on port ${this.port}`); - } - - /** - * Stop the mock server - */ - async stop(): Promise { - if (this.server) { - this.server.stop(true); - this.server = null; - this.port = 0; - console.log('Mock server stopped'); - } - } - - /** - * Get the base URL of the mock server - */ - getBaseUrl(): string { - if (!this.server) { - throw new Error('Server not started'); - } - return `http://localhost:${this.port}`; - } - - /** - * Handle incoming requests - */ private async handleRequest(req: Request): Promise { - const url = new URL(req.url); - const path = url.pathname; - - console.log(`Mock server handling request: ${req.method} ${path}`); - - // Status endpoints - if (path.startsWith('/status/')) { - const status = parseInt(path.replace('/status/', ''), 10); - console.log(`Returning status: ${status}`); - return new Response(null, { status }); - } // Headers endpoint - if (path === '/headers') { - const headers = Object.fromEntries([...req.headers.entries()]); - console.log('Headers endpoint called, received headers:', headers); - return Response.json({ headers }); - } // Basic auth endpoint - if (path.startsWith('/basic-auth/')) { - const parts = path.split('/').filter(Boolean); - const expectedUsername = parts[1]; - const expectedPassword = parts[2]; - console.log(`Basic auth endpoint called: expected user=${expectedUsername}, pass=${expectedPassword}`); - - const authHeader = req.headers.get('authorization'); - if (!authHeader || !authHeader.startsWith('Basic ')) { - console.log('Missing or invalid Authorization header'); - return new Response('Unauthorized', { status: 401 }); - } - - const base64Credentials = authHeader.split(' ')[1]; - const credentials = atob(base64Credentials); - const [username, password] = credentials.split(':'); - - if (username === expectedUsername && password === expectedPassword) { - return Response.json({ - authenticated: true, - user: username - }); - } - - return new Response('Unauthorized', { status: 401 }); - } - - // 
Echo request body - if (path === '/post' && req.method === 'POST') { - const data = await req.json(); - return Response.json({ - data, - headers: Object.fromEntries([...req.headers.entries()]), - method: req.method - }); - } - - // Default response - return Response.json({ - url: req.url, - method: req.method, - headers: Object.fromEntries([...req.headers.entries()]) - }); - } - - /** - * Handle errors - */ - private handleError(error: Error): Response { - return new Response('Server error', { status: 500 }); - } -} +/** + * Mock HTTP server for testing the HTTP client + * Replaces external dependency on httpbin.org with a local server + */ +export class MockServer { + private server: ReturnType | null = null; + private port: number = 0; + + /** + * Start the mock server on a random port + */ + async start(): Promise { + this.server = Bun.serve({ + port: 1, // Use any available port + fetch: this.handleRequest.bind(this), + error: this.handleError.bind(this), + }); + + this.port = this.server.port || 1; + console.log(`Mock server started on port ${this.port}`); + } + + /** + * Stop the mock server + */ + async stop(): Promise { + if (this.server) { + this.server.stop(true); + this.server = null; + this.port = 0; + console.log('Mock server stopped'); + } + } + + /** + * Get the base URL of the mock server + */ + getBaseUrl(): string { + if (!this.server) { + throw new Error('Server not started'); + } + return `http://localhost:${this.port}`; + } + + /** + * Handle incoming requests + */ private async handleRequest(req: Request): Promise { + const url = new URL(req.url); + const path = url.pathname; + + console.log(`Mock server handling request: ${req.method} ${path}`); + + // Status endpoints + if (path.startsWith('/status/')) { + const status = parseInt(path.replace('/status/', ''), 10); + console.log(`Returning status: ${status}`); + return new Response(null, { status }); + } // Headers endpoint + if (path === '/headers') { + const headers = 
Object.fromEntries([...req.headers.entries()]); + console.log('Headers endpoint called, received headers:', headers); + return Response.json({ headers }); + } // Basic auth endpoint + if (path.startsWith('/basic-auth/')) { + const parts = path.split('/').filter(Boolean); + const expectedUsername = parts[1]; + const expectedPassword = parts[2]; + console.log(`Basic auth endpoint called: expected user=${expectedUsername}, pass=${expectedPassword}`); + + const authHeader = req.headers.get('authorization'); + if (!authHeader || !authHeader.startsWith('Basic ')) { + console.log('Missing or invalid Authorization header'); + return new Response('Unauthorized', { status: 401 }); + } + + const base64Credentials = authHeader.split(' ')[1]; + const credentials = atob(base64Credentials); + const [username, password] = credentials.split(':'); + + if (username === expectedUsername && password === expectedPassword) { + return Response.json({ + authenticated: true, + user: username + }); + } + + return new Response('Unauthorized', { status: 401 }); + } + + // Echo request body + if (path === '/post' && req.method === 'POST') { + const data = await req.json(); + return Response.json({ + data, + headers: Object.fromEntries([...req.headers.entries()]), + method: req.method + }); + } + + // Default response + return Response.json({ + url: req.url, + method: req.method, + headers: Object.fromEntries([...req.headers.entries()]) + }); + } + + /** + * Handle errors + */ + private handleError(error: Error): Response { + return new Response('Server error', { status: 500 }); + } +} diff --git a/libs/http/tsconfig.json b/libs/http/tsconfig.json index 977e389..01f4bc5 100644 --- a/libs/http/tsconfig.json +++ b/libs/http/tsconfig.json @@ -1,13 +1,13 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**/*", "**/tests/**/*"], - 
"references": [ - { "path": "../types" }, - { "path": "../logger" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**/*", "**/tests/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../logger" } + ] +} diff --git a/libs/http/turbo.json b/libs/http/turbo.json index 7632db9..c630cca 100644 --- a/libs/http/turbo.json +++ b/libs/http/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/logger/README.md b/libs/logger/README.md index dcf996a..cd49f6e 100644 --- a/libs/logger/README.md +++ b/libs/logger/README.md @@ -1,337 +1,337 @@ -# @stock-bot/logger - -Enhanced logging library with Loki integration for the Stock Bot platform (June 2025). 
- -## Features - -- 🎯 **Multiple Log Levels**: debug, info, warn, error, http -- 🌐 **Loki Integration**: Centralized logging with Grafana visualization -- πŸ“ **File Logging**: Daily rotating log files with compression -- 🎨 **Console Logging**: Colored, formatted console output -- πŸ“Š **Structured Logging**: JSON-formatted logs with metadata -- ⚑ **Performance Optimized**: Batching and async logging -- πŸ” **Security**: Automatic sensitive data masking -- 🎭 **Express Middleware**: Request/response logging -- πŸ“ˆ **Business Events**: Specialized logging for trading operations - -## Installation - -```bash -# Using Bun (current runtime) -bun install -``` - -## Basic Usage - -### Simple Logging - -```typescript -import { getLogger } from '@stock-bot/logger'; - -const logger = getLogger('my-service'); - -logger.info('Service started'); -logger.warn('This is a warning'); -logger.error('An error occurred', new Error('Something went wrong')); -``` - -### With Context - -```typescript -import { getLogger } from '@stock-bot/logger'; - -const logger = getLogger('trading-service'); - -logger.info('Trade executed', { - symbol: 'AAPL', - quantity: 100, - price: 150.25, - userId: '12345', - sessionId: 'abc-def-ghi' -}); -``` - -### Performance Logging - -```typescript -import { getLogger, createTimer } from '@stock-bot/logger'; - -const logger = getLogger('data-processor'); -const timer = createTimer('data-processing'); - -// ... do some work ... 
- -const timing = timer.end(); -logger.performance('Data processing completed', timing); -``` - -### Business Events - -```typescript -import { getLogger, createBusinessEvent } from '@stock-bot/logger'; - -const logger = getLogger('order-service'); - -logger.business('Order placed', createBusinessEvent( - 'order', - 'place', - { - entity: 'order-123', - result: 'success', - symbol: 'TSLA', - amount: 50000 - } -)); -``` - -### Security Events - -```typescript -import { getLogger, createSecurityEvent } from '@stock-bot/logger'; - -const logger = getLogger('auth-service'); - -logger.security('Failed login attempt', createSecurityEvent( - 'authentication', - { - user: 'john@example.com', - result: 'failure', - ip: '192.168.1.100', - severity: 'medium' - } -)); -``` - -## Express Middleware - -### Basic Request Logging - -```typescript -import express from 'express'; -import { loggingMiddleware } from '@stock-bot/logger'; - -const app = express(); - -app.use(loggingMiddleware({ - serviceName: 'api-gateway', - skipPaths: ['/health', '/metrics'] -})); -``` - -### Error Logging - -```typescript -import { errorLoggingMiddleware, getLogger } from '@stock-bot/logger'; - -const logger = getLogger('api-gateway'); - -// Add after your routes but before error handlers -app.use(errorLoggingMiddleware(logger)); -``` - -### Request-scoped Logger - -```typescript -import { createRequestLogger, getLogger } from '@stock-bot/logger'; - -const baseLogger = getLogger('api-gateway'); - -app.use((req, res, next) => { - req.logger = createRequestLogger(req, baseLogger); - next(); -}); - -app.get('/api/data', (req, res) => { - req.logger.info('Processing data request'); - // ... handle request ... -}); -``` - -## Configuration - -The logger uses configuration from `@stock-bot/config`. 
Key environment variables: - -```bash -# Logging -LOG_LEVEL=info -LOG_CONSOLE=true -LOG_FILE=true -LOG_FILE_PATH=./logs - -# Loki -LOKI_HOST=localhost -LOKI_PORT=3100 -LOKI_BATCH_SIZE=1024 -``` - -## Advanced Usage - -### Child Loggers - -```typescript -import { getLogger } from '@stock-bot/logger'; - -const parentLogger = getLogger('trading-service'); -const orderLogger = parentLogger.child({ - module: 'order-processing', - orderId: '12345' -}); - -orderLogger.info('Order validated'); // Will include parent context -``` - -### Custom Configuration - -```typescript -import { getLogger } from '@stock-bot/logger'; - -// Uses standard getLogger with service-specific configuration -const logger = getLogger('custom-service'); -``` - -### Sensitive Data Masking - -```typescript -import { sanitizeMetadata, maskSensitiveData } from '@stock-bot/logger'; - -const unsafeData = { - username: 'john', - password: 'secret123', - apiKey: 'abc123def456' -}; - -const safeData = sanitizeMetadata(unsafeData); -// { username: 'john', password: '[REDACTED]', apiKey: '[REDACTED]' } - -const message = maskSensitiveData('User API key: abc123def456'); -// 'User API key: [API_KEY]' -``` - -### Log Throttling - -```typescript -import { LogThrottle } from '@stock-bot/logger'; - -const throttle = new LogThrottle(10, 60000); // 10 logs per minute - -if (throttle.shouldLog('error-key')) { - logger.error('This error will be throttled'); -} -``` - -## Viewing Logs - -### Grafana Dashboard - -1. Start the monitoring stack: `docker-compose up grafana loki` -2. Open Grafana at http://localhost:3000 -3. Use the "Stock Bot Logs" dashboard -4. Query logs with LogQL: `{service="your-service"}` - -### Log Files - -When file logging is enabled, logs are written to: -- `./logs/{service-name}-YYYY-MM-DD.log` - All logs -- `./logs/{service-name}-error-YYYY-MM-DD.log` - Error logs only - -## Best Practices - -1. 
**Use appropriate log levels**: - - `debug`: Detailed development information - - `info`: General operational messages - - `warn`: Potential issues - - `error`: Actual errors requiring attention - -2. **Include context**: Always provide relevant metadata - ```typescript - logger.info('Trade executed', { symbol, quantity, price, orderId }); - ``` - -3. **Use structured logging**: Avoid string concatenation - ```typescript - // Good - logger.info('User logged in', { userId, ip, userAgent }); - - // Avoid - logger.info(`User ${userId} logged in from ${ip}`); - ``` - -4. **Handle sensitive data**: Use sanitization utilities - ```typescript - const safeMetadata = sanitizeMetadata(requestData); - logger.info('API request', safeMetadata); - ``` - -5. **Use correlation IDs**: Track requests across services - ```typescript - const logger = getLogger('service').child({ - correlationId: req.headers['x-correlation-id'] - }); - ``` - -## Integration with Services - -To use in your service: - -1. Add dependency to your service's `package.json`: - ```json - { - "dependencies": { - "@stock-bot/logger": "*" - } - } - ``` - -2. Update your service's `tsconfig.json` references: - ```json - { - "references": [ - { "path": "../../../libs/logger" } - ] - } - ``` - -3. Import and use: - ```typescript - import { getLogger } from '@stock-bot/logger'; - - const logger = getLogger('my-service'); - ``` - -## Performance Considerations - -- Logs are batched and sent asynchronously to Loki -- File logging uses daily rotation to prevent large files -- Console logging can be disabled in production -- Use log throttling for high-frequency events -- Sensitive data is automatically masked - -## Troubleshooting - -### Logs not appearing in Loki - -1. Check Loki connection: - ```bash - curl http://localhost:3100/ready - ``` - -2. Verify environment variables: - ```bash - echo $LOKI_HOST $LOKI_PORT - ``` - -3. 
Check container logs: - ```bash - docker logs stock-bot-loki - ``` - -### High memory usage - -- Reduce `LOKI_BATCH_SIZE` if batching too many logs -- Disable file logging if not needed - -### Missing logs - -- Check log level configuration -- Verify service name matches expectations -- Ensure proper error handling around logger calls +# @stock-bot/logger + +Enhanced logging library with Loki integration for the Stock Bot platform (June 2025). + +## Features + +- 🎯 **Multiple Log Levels**: debug, info, warn, error, http +- 🌐 **Loki Integration**: Centralized logging with Grafana visualization +- πŸ“ **File Logging**: Daily rotating log files with compression +- 🎨 **Console Logging**: Colored, formatted console output +- πŸ“Š **Structured Logging**: JSON-formatted logs with metadata +- ⚑ **Performance Optimized**: Batching and async logging +- πŸ” **Security**: Automatic sensitive data masking +- 🎭 **Express Middleware**: Request/response logging +- πŸ“ˆ **Business Events**: Specialized logging for trading operations + +## Installation + +```bash +# Using Bun (current runtime) +bun install +``` + +## Basic Usage + +### Simple Logging + +```typescript +import { getLogger } from '@stock-bot/logger'; + +const logger = getLogger('my-service'); + +logger.info('Service started'); +logger.warn('This is a warning'); +logger.error('An error occurred', new Error('Something went wrong')); +``` + +### With Context + +```typescript +import { getLogger } from '@stock-bot/logger'; + +const logger = getLogger('trading-service'); + +logger.info('Trade executed', { + symbol: 'AAPL', + quantity: 100, + price: 150.25, + userId: '12345', + sessionId: 'abc-def-ghi' +}); +``` + +### Performance Logging + +```typescript +import { getLogger, createTimer } from '@stock-bot/logger'; + +const logger = getLogger('data-processor'); +const timer = createTimer('data-processing'); + +// ... do some work ... 
+ +const timing = timer.end(); +logger.performance('Data processing completed', timing); +``` + +### Business Events + +```typescript +import { getLogger, createBusinessEvent } from '@stock-bot/logger'; + +const logger = getLogger('order-service'); + +logger.business('Order placed', createBusinessEvent( + 'order', + 'place', + { + entity: 'order-123', + result: 'success', + symbol: 'TSLA', + amount: 50000 + } +)); +``` + +### Security Events + +```typescript +import { getLogger, createSecurityEvent } from '@stock-bot/logger'; + +const logger = getLogger('auth-service'); + +logger.security('Failed login attempt', createSecurityEvent( + 'authentication', + { + user: 'john@example.com', + result: 'failure', + ip: '192.168.1.100', + severity: 'medium' + } +)); +``` + +## Express Middleware + +### Basic Request Logging + +```typescript +import express from 'express'; +import { loggingMiddleware } from '@stock-bot/logger'; + +const app = express(); + +app.use(loggingMiddleware({ + serviceName: 'api-gateway', + skipPaths: ['/health', '/metrics'] +})); +``` + +### Error Logging + +```typescript +import { errorLoggingMiddleware, getLogger } from '@stock-bot/logger'; + +const logger = getLogger('api-gateway'); + +// Add after your routes but before error handlers +app.use(errorLoggingMiddleware(logger)); +``` + +### Request-scoped Logger + +```typescript +import { createRequestLogger, getLogger } from '@stock-bot/logger'; + +const baseLogger = getLogger('api-gateway'); + +app.use((req, res, next) => { + req.logger = createRequestLogger(req, baseLogger); + next(); +}); + +app.get('/api/data', (req, res) => { + req.logger.info('Processing data request'); + // ... handle request ... +}); +``` + +## Configuration + +The logger uses configuration from `@stock-bot/config`. 
Key environment variables: + +```bash +# Logging +LOG_LEVEL=info +LOG_CONSOLE=true +LOG_FILE=true +LOG_FILE_PATH=./logs + +# Loki +LOKI_HOST=localhost +LOKI_PORT=3100 +LOKI_BATCH_SIZE=1024 +``` + +## Advanced Usage + +### Child Loggers + +```typescript +import { getLogger } from '@stock-bot/logger'; + +const parentLogger = getLogger('trading-service'); +const orderLogger = parentLogger.child({ + module: 'order-processing', + orderId: '12345' +}); + +orderLogger.info('Order validated'); // Will include parent context +``` + +### Custom Configuration + +```typescript +import { getLogger } from '@stock-bot/logger'; + +// Uses standard getLogger with service-specific configuration +const logger = getLogger('custom-service'); +``` + +### Sensitive Data Masking + +```typescript +import { sanitizeMetadata, maskSensitiveData } from '@stock-bot/logger'; + +const unsafeData = { + username: 'john', + password: 'secret123', + apiKey: 'abc123def456' +}; + +const safeData = sanitizeMetadata(unsafeData); +// { username: 'john', password: '[REDACTED]', apiKey: '[REDACTED]' } + +const message = maskSensitiveData('User API key: abc123def456'); +// 'User API key: [API_KEY]' +``` + +### Log Throttling + +```typescript +import { LogThrottle } from '@stock-bot/logger'; + +const throttle = new LogThrottle(10, 60000); // 10 logs per minute + +if (throttle.shouldLog('error-key')) { + logger.error('This error will be throttled'); +} +``` + +## Viewing Logs + +### Grafana Dashboard + +1. Start the monitoring stack: `docker-compose up grafana loki` +2. Open Grafana at http://localhost:3000 +3. Use the "Stock Bot Logs" dashboard +4. Query logs with LogQL: `{service="your-service"}` + +### Log Files + +When file logging is enabled, logs are written to: +- `./logs/{service-name}-YYYY-MM-DD.log` - All logs +- `./logs/{service-name}-error-YYYY-MM-DD.log` - Error logs only + +## Best Practices + +1. 
**Use appropriate log levels**: + - `debug`: Detailed development information + - `info`: General operational messages + - `warn`: Potential issues + - `error`: Actual errors requiring attention + +2. **Include context**: Always provide relevant metadata + ```typescript + logger.info('Trade executed', { symbol, quantity, price, orderId }); + ``` + +3. **Use structured logging**: Avoid string concatenation + ```typescript + // Good + logger.info('User logged in', { userId, ip, userAgent }); + + // Avoid + logger.info(`User ${userId} logged in from ${ip}`); + ``` + +4. **Handle sensitive data**: Use sanitization utilities + ```typescript + const safeMetadata = sanitizeMetadata(requestData); + logger.info('API request', safeMetadata); + ``` + +5. **Use correlation IDs**: Track requests across services + ```typescript + const logger = getLogger('service').child({ + correlationId: req.headers['x-correlation-id'] + }); + ``` + +## Integration with Services + +To use in your service: + +1. Add dependency to your service's `package.json`: + ```json + { + "dependencies": { + "@stock-bot/logger": "*" + } + } + ``` + +2. Update your service's `tsconfig.json` references: + ```json + { + "references": [ + { "path": "../../../libs/logger" } + ] + } + ``` + +3. Import and use: + ```typescript + import { getLogger } from '@stock-bot/logger'; + + const logger = getLogger('my-service'); + ``` + +## Performance Considerations + +- Logs are batched and sent asynchronously to Loki +- File logging uses daily rotation to prevent large files +- Console logging can be disabled in production +- Use log throttling for high-frequency events +- Sensitive data is automatically masked + +## Troubleshooting + +### Logs not appearing in Loki + +1. Check Loki connection: + ```bash + curl http://localhost:3100/ready + ``` + +2. Verify environment variables: + ```bash + echo $LOKI_HOST $LOKI_PORT + ``` + +3. 
Check container logs: + ```bash + docker logs stock-bot-loki + ``` + +### High memory usage + +- Reduce `LOKI_BATCH_SIZE` if batching too many logs +- Disable file logging if not needed + +### Missing logs + +- Check log level configuration +- Verify service name matches expectations +- Ensure proper error handling around logger calls diff --git a/libs/logger/bunfig.toml b/libs/logger/bunfig.toml index 7633a4d..cd07ed5 100644 --- a/libs/logger/bunfig.toml +++ b/libs/logger/bunfig.toml @@ -1,18 +1,18 @@ -# Logger library Bun configuration - -[test] -# Configure coverage and test behavior -coverage = true -timeout = "30s" - -# Configure test environment -preload = ["./test/setup.ts"] - -# Environment variables for tests -[test.env] -NODE_ENV = "test" -LOG_LEVEL = "silent" -LOG_CONSOLE = "false" -LOG_FILE = "false" -LOKI_HOST = "" -LOKI_URL = "" +# Logger library Bun configuration + +[test] +# Configure coverage and test behavior +coverage = true +timeout = "30s" + +# Configure test environment +preload = ["./test/setup.ts"] + +# Environment variables for tests +[test.env] +NODE_ENV = "test" +LOG_LEVEL = "silent" +LOG_CONSOLE = "false" +LOG_FILE = "false" +LOKI_HOST = "" +LOKI_URL = "" diff --git a/libs/logger/src/index.ts b/libs/logger/src/index.ts index bd888f5..b4a521e 100644 --- a/libs/logger/src/index.ts +++ b/libs/logger/src/index.ts @@ -1,18 +1,18 @@ -/** - * @stock-bot/logger - Simplified logging library - * - * Main exports for the logger library - */ - -// Core logger classes and functions -export { - Logger, - getLogger, - shutdownLoggers -} from './logger'; - -// Type definitions -export type { LogLevel, LogContext, LogMetadata } from './types'; - -// Default export -export { getLogger as default } from './logger'; +/** + * @stock-bot/logger - Simplified logging library + * + * Main exports for the logger library + */ + +// Core logger classes and functions +export { + Logger, + getLogger, + shutdownLoggers +} from './logger'; + +// Type definitions 
+export type { LogLevel, LogContext, LogMetadata } from './types'; + +// Default export +export { getLogger as default } from './logger'; diff --git a/libs/logger/src/logger.ts b/libs/logger/src/logger.ts index fa1a585..03318ca 100644 --- a/libs/logger/src/logger.ts +++ b/libs/logger/src/logger.ts @@ -1,271 +1,271 @@ -/** - * Simplified Pino-based logger for Stock Bot platform - * - * Features: - * - High performance JSON logging with Pino - * - Console, file, and Loki transports - * - Structured logging with metadata - * - Service-specific context - */ - -import pino from 'pino'; -import { loggingConfig, lokiConfig } from '@stock-bot/config'; -import type { LogLevel, LogContext, LogMetadata } from './types'; - -// Simple cache for logger instances -const loggerCache = new Map(); -console.log('Logger cache initialized: ', loggingConfig.LOG_LEVEL); -/** - * Create transport configuration - */ -function createTransports(serviceName: string): any { - const targets: any[] = []; - // const isDev = loggingConfig.LOG_ENVIRONMENT === 'development'; - // Console transport - if (loggingConfig.LOG_CONSOLE) { - targets.push({ - target: 'pino-pretty', - level: loggingConfig.LOG_LEVEL, // Only show errors on console - options: { - colorize: true, - translateTime: 'yyyy-mm-dd HH:MM:ss.l', - messageFormat: '[{service}{childName}] {msg}', - singleLine: true, - hideObject: false, - ignore: 'pid,hostname,service,environment,version,childName', - errorLikeObjectKeys: ['err', 'error'], - errorProps: 'message,stack,name,code', - } - }); - } - - // File transport - if (loggingConfig.LOG_FILE) { - targets.push({ - target: 'pino/file', - level: loggingConfig.LOG_LEVEL, - options: { - destination: `${loggingConfig.LOG_FILE_PATH}/${serviceName}.log`, - mkdir: true - } - }); - } - - // Loki transport - if (lokiConfig.LOKI_HOST) { - targets.push({ - target: 'pino-loki', - level: loggingConfig.LOG_LEVEL, - options: { - host: lokiConfig.LOKI_URL || 
`http://${lokiConfig.LOKI_HOST}:${lokiConfig.LOKI_PORT}`, - labels: { - service: serviceName, - environment: lokiConfig.LOKI_ENVIRONMENT_LABEL - }, - ignore: 'childName', - } - }); - } - - return targets.length > 0 ? { targets } : null; -} - -/** - * Get or create pino logger - */ -function getPinoLogger(serviceName: string): pino.Logger { - if (!loggerCache.has(serviceName)) { - const transport = createTransports(serviceName); - - const config: pino.LoggerOptions = { - level: loggingConfig.LOG_LEVEL, - base: { - service: serviceName, - environment: loggingConfig.LOG_ENVIRONMENT, - version: loggingConfig.LOG_SERVICE_VERSION - } - }; - - if (transport) { - config.transport = transport; - } - - loggerCache.set(serviceName, pino(config)); - } - - return loggerCache.get(serviceName)!; -} - - -/** - * Simplified Logger class - */ -export class Logger { - private pino: pino.Logger; - private context: LogContext; - private serviceName: string; - private childName?: string; - - constructor(serviceName: string, context: LogContext = {}) { - this.pino = getPinoLogger(serviceName); - this.context = context; - this.serviceName = serviceName; - } - - /** - * Core log method - */ - private log(level: LogLevel, message: string | object, metadata?: LogMetadata): void { - const data = { ...this.context, ...metadata }; - - if (typeof message === 'string') { - (this.pino as any)[level](data, message); - } else { - (this.pino as any)[level]({ ...data, data: message }, 'Object logged'); - } - } - - // Simple log level methods - debug(message: string | object, metadata?: LogMetadata): void { - this.log('debug', message, metadata); - } - - info(message: string | object, metadata?: LogMetadata): void { - this.log('info', message, metadata); - } - - warn(message: string | object, metadata?: LogMetadata): void { - this.log('warn', message, metadata); - } - - error(message: string | object, metadata?: LogMetadata & { error?: any } | unknown): void { - let data: any = {}; - - // Handle 
metadata parameter normalization - if (metadata instanceof Error) { - // Direct Error object as metadata - data = { error: metadata }; - } else if (metadata !== null && typeof metadata === 'object') { - // Object metadata (including arrays, but not null) - data = { ...metadata }; - } else if (metadata !== undefined) { - // Primitive values (string, number, boolean, etc.) - data = { metadata }; - } - - // Handle multiple error properties in metadata - const errorKeys = ['error', 'err', 'primaryError', 'secondaryError']; - errorKeys.forEach(key => { - if (data[key]) { - const normalizedKey = key === 'error' ? 'err' : `${key}_normalized`; - data[normalizedKey] = this.normalizeError(data[key]); - - // Only delete the original 'error' key to maintain other error properties - if (key === 'error') { - delete data.error; - } - } - }); - - this.log('error', message, data); - } - - /** - * Normalize any error type to a structured format - */ - private normalizeError(error: any): any { - if (error instanceof Error) { - return { - name: error.name, - message: error.message, - stack: error.stack, - }; - } - - if (error && typeof error === 'object') { - // Handle error-like objects - return { - name: error.name || 'UnknownError', - message: error.message || error.toString(), - ...(error.stack && { stack: error.stack }), - ...(error.code && { code: error.code }), - ...(error.status && { status: error.status }) - }; - } - - // Handle primitives (string, number, etc.) 
- return { - name: 'UnknownError', - message: String(error) - }; - } - /** - * Create child logger with additional context - */ - child(serviceName: string, context?: LogContext): Logger { - // Create child logger that shares the same pino instance with additional context - const childLogger = Object.create(Logger.prototype); - childLogger.serviceName = this.serviceName; - childLogger.childName = serviceName; - childLogger.context = { ...this.context, ...context }; - const childBindings = { - service: this.serviceName, - childName: ' -> ' + serviceName, - ...(context || childLogger.context) - }; - - childLogger.pino = this.pino.child(childBindings); - return childLogger; - // } - // childLogger.pino = this.pino.child(context || childLogger.context); // Let pino handle level inheritance naturally - // return childLogger; - } - - // Getters for service and context - getServiceName(): string { - return this.serviceName; - } - getChildName(): string | undefined { - return this.childName; - } -} - -/** - * Main factory function - */ -export function getLogger(serviceName: string, context?: LogContext): Logger { - return new Logger(serviceName, context); -} - -/** - * Gracefully shutdown all logger instances - * This should be called during application shutdown to ensure all logs are flushed - */ -export async function shutdownLoggers(): Promise { - const flushPromises = Array.from(loggerCache.values()).map(logger => { - return new Promise((resolve) => { - if (typeof logger.flush === 'function') { - logger.flush((err) => { - if (err) { - console.error('Logger flush error:', err); - } - resolve(); - }); - } else { - resolve(); - } - }); - }); - - try { - await Promise.allSettled(flushPromises); - console.log('All loggers flushed successfully'); - } catch (error) { - console.error('Logger flush failed:', error); - } finally { - loggerCache.clear(); - } -} - -// Export types for convenience -export type { LogLevel, LogContext, LogMetadata } from './types'; +/** + * 
Simplified Pino-based logger for Stock Bot platform + * + * Features: + * - High performance JSON logging with Pino + * - Console, file, and Loki transports + * - Structured logging with metadata + * - Service-specific context + */ + +import pino from 'pino'; +import { loggingConfig, lokiConfig } from '@stock-bot/config'; +import type { LogLevel, LogContext, LogMetadata } from './types'; + +// Simple cache for logger instances +const loggerCache = new Map(); +console.log('Logger cache initialized: ', loggingConfig.LOG_LEVEL); +/** + * Create transport configuration + */ +function createTransports(serviceName: string): any { + const targets: any[] = []; + // const isDev = loggingConfig.LOG_ENVIRONMENT === 'development'; + // Console transport + if (loggingConfig.LOG_CONSOLE) { + targets.push({ + target: 'pino-pretty', + level: loggingConfig.LOG_LEVEL, // Only show errors on console + options: { + colorize: true, + translateTime: 'yyyy-mm-dd HH:MM:ss.l', + messageFormat: '[{service}{childName}] {msg}', + singleLine: true, + hideObject: false, + ignore: 'pid,hostname,service,environment,version,childName', + errorLikeObjectKeys: ['err', 'error'], + errorProps: 'message,stack,name,code', + } + }); + } + + // File transport + if (loggingConfig.LOG_FILE) { + targets.push({ + target: 'pino/file', + level: loggingConfig.LOG_LEVEL, + options: { + destination: `${loggingConfig.LOG_FILE_PATH}/${serviceName}.log`, + mkdir: true + } + }); + } + + // Loki transport + if (lokiConfig.LOKI_HOST) { + targets.push({ + target: 'pino-loki', + level: loggingConfig.LOG_LEVEL, + options: { + host: lokiConfig.LOKI_URL || `http://${lokiConfig.LOKI_HOST}:${lokiConfig.LOKI_PORT}`, + labels: { + service: serviceName, + environment: lokiConfig.LOKI_ENVIRONMENT_LABEL + }, + ignore: 'childName', + } + }); + } + + return targets.length > 0 ? 
{ targets } : null; +} + +/** + * Get or create pino logger + */ +function getPinoLogger(serviceName: string): pino.Logger { + if (!loggerCache.has(serviceName)) { + const transport = createTransports(serviceName); + + const config: pino.LoggerOptions = { + level: loggingConfig.LOG_LEVEL, + base: { + service: serviceName, + environment: loggingConfig.LOG_ENVIRONMENT, + version: loggingConfig.LOG_SERVICE_VERSION + } + }; + + if (transport) { + config.transport = transport; + } + + loggerCache.set(serviceName, pino(config)); + } + + return loggerCache.get(serviceName)!; +} + + +/** + * Simplified Logger class + */ +export class Logger { + private pino: pino.Logger; + private context: LogContext; + private serviceName: string; + private childName?: string; + + constructor(serviceName: string, context: LogContext = {}) { + this.pino = getPinoLogger(serviceName); + this.context = context; + this.serviceName = serviceName; + } + + /** + * Core log method + */ + private log(level: LogLevel, message: string | object, metadata?: LogMetadata): void { + const data = { ...this.context, ...metadata }; + + if (typeof message === 'string') { + (this.pino as any)[level](data, message); + } else { + (this.pino as any)[level]({ ...data, data: message }, 'Object logged'); + } + } + + // Simple log level methods + debug(message: string | object, metadata?: LogMetadata): void { + this.log('debug', message, metadata); + } + + info(message: string | object, metadata?: LogMetadata): void { + this.log('info', message, metadata); + } + + warn(message: string | object, metadata?: LogMetadata): void { + this.log('warn', message, metadata); + } + + error(message: string | object, metadata?: LogMetadata & { error?: any } | unknown): void { + let data: any = {}; + + // Handle metadata parameter normalization + if (metadata instanceof Error) { + // Direct Error object as metadata + data = { error: metadata }; + } else if (metadata !== null && typeof metadata === 'object') { + // Object metadata 
(including arrays, but not null) + data = { ...metadata }; + } else if (metadata !== undefined) { + // Primitive values (string, number, boolean, etc.) + data = { metadata }; + } + + // Handle multiple error properties in metadata + const errorKeys = ['error', 'err', 'primaryError', 'secondaryError']; + errorKeys.forEach(key => { + if (data[key]) { + const normalizedKey = key === 'error' ? 'err' : `${key}_normalized`; + data[normalizedKey] = this.normalizeError(data[key]); + + // Only delete the original 'error' key to maintain other error properties + if (key === 'error') { + delete data.error; + } + } + }); + + this.log('error', message, data); + } + + /** + * Normalize any error type to a structured format + */ + private normalizeError(error: any): any { + if (error instanceof Error) { + return { + name: error.name, + message: error.message, + stack: error.stack, + }; + } + + if (error && typeof error === 'object') { + // Handle error-like objects + return { + name: error.name || 'UnknownError', + message: error.message || error.toString(), + ...(error.stack && { stack: error.stack }), + ...(error.code && { code: error.code }), + ...(error.status && { status: error.status }) + }; + } + + // Handle primitives (string, number, etc.) 
+ return { + name: 'UnknownError', + message: String(error) + }; + } + /** + * Create child logger with additional context + */ + child(serviceName: string, context?: LogContext): Logger { + // Create child logger that shares the same pino instance with additional context + const childLogger = Object.create(Logger.prototype); + childLogger.serviceName = this.serviceName; + childLogger.childName = serviceName; + childLogger.context = { ...this.context, ...context }; + const childBindings = { + service: this.serviceName, + childName: ' -> ' + serviceName, + ...(context || childLogger.context) + }; + + childLogger.pino = this.pino.child(childBindings); + return childLogger; + // } + // childLogger.pino = this.pino.child(context || childLogger.context); // Let pino handle level inheritance naturally + // return childLogger; + } + + // Getters for service and context + getServiceName(): string { + return this.serviceName; + } + getChildName(): string | undefined { + return this.childName; + } +} + +/** + * Main factory function + */ +export function getLogger(serviceName: string, context?: LogContext): Logger { + return new Logger(serviceName, context); +} + +/** + * Gracefully shutdown all logger instances + * This should be called during application shutdown to ensure all logs are flushed + */ +export async function shutdownLoggers(): Promise { + const flushPromises = Array.from(loggerCache.values()).map(logger => { + return new Promise((resolve) => { + if (typeof logger.flush === 'function') { + logger.flush((err) => { + if (err) { + console.error('Logger flush error:', err); + } + resolve(); + }); + } else { + resolve(); + } + }); + }); + + try { + await Promise.allSettled(flushPromises); + console.log('All loggers flushed successfully'); + } catch (error) { + console.error('Logger flush failed:', error); + } finally { + loggerCache.clear(); + } +} + +// Export types for convenience +export type { LogLevel, LogContext, LogMetadata } from './types'; diff --git 
a/libs/logger/src/types.ts b/libs/logger/src/types.ts index 5d4e298..b6b70b9 100644 --- a/libs/logger/src/types.ts +++ b/libs/logger/src/types.ts @@ -1,16 +1,16 @@ -/** - * Simplified type definitions for the logger library - */ - -// Standard log levels (simplified to pino defaults) -export type LogLevel = 'debug' | 'info' | 'warn' | 'error'; - -// Context that persists across log calls -export interface LogContext { - [key: string]: any; -} - -// Metadata for individual log entries -export interface LogMetadata { - [key: string]: any; -} +/** + * Simplified type definitions for the logger library + */ + +// Standard log levels (simplified to pino defaults) +export type LogLevel = 'debug' | 'info' | 'warn' | 'error'; + +// Context that persists across log calls +export interface LogContext { + [key: string]: any; +} + +// Metadata for individual log entries +export interface LogMetadata { + [key: string]: any; +} diff --git a/libs/logger/test/advanced.test.ts b/libs/logger/test/advanced.test.ts index 889c2ae..de5130c 100644 --- a/libs/logger/test/advanced.test.ts +++ b/libs/logger/test/advanced.test.ts @@ -1,200 +1,200 @@ -/** - * Advanced Logger Tests - * - * Tests for advanced logger functionality including complex metadata handling, - * child loggers, and advanced error scenarios. 
- */ - -import { describe, it, expect, beforeEach, afterEach } from 'bun:test'; -import { Logger, shutdownLoggers } from '../src'; -import { loggerTestHelpers } from './setup'; - -describe('Advanced Logger Features', () => { - let logger: Logger; - let testLoggerInstance: ReturnType; - - beforeEach(() => { - testLoggerInstance = loggerTestHelpers.createTestLogger('advanced-features'); - logger = testLoggerInstance.logger; - }); afterEach(async () => { - testLoggerInstance.clearCapturedLogs(); - // Clear any global logger cache - await shutdownLoggers(); - }); - - describe('Complex Metadata Handling', () => { - it('should handle nested metadata objects', () => { - const complexMetadata = { - user: { id: '123', name: 'John Doe' }, - session: { id: 'sess-456', timeout: 3600 }, - request: { method: 'POST', path: '/api/test' } - }; - - logger.info('Complex operation', complexMetadata); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].user).toEqual({ id: '123', name: 'John Doe' }); - expect(logs[0].session).toEqual({ id: 'sess-456', timeout: 3600 }); - expect(logs[0].request).toEqual({ method: 'POST', path: '/api/test' }); - }); - - it('should handle arrays in metadata', () => { - const arrayMetadata = { - tags: ['user', 'authentication', 'success'], - ids: [1, 2, 3, 4] - }; - - logger.info('Array metadata test', arrayMetadata); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].tags).toEqual(['user', 'authentication', 'success']); - expect(logs[0].ids).toEqual([1, 2, 3, 4]); - }); - - it('should handle null and undefined metadata values', () => { - const nullMetadata = { - nullValue: null, - undefinedValue: undefined, - emptyString: '', - zeroValue: 0 - }; - - logger.info('Null metadata test', nullMetadata); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].nullValue).toBe(null); - 
expect(logs[0].emptyString).toBe(''); - expect(logs[0].zeroValue).toBe(0); - }); - }); - - describe('Child Logger Functionality', () => { - it('should create child logger with additional context', () => { - const childLogger = logger.child({ - component: 'auth-service', - version: '1.2.3' - }); - - childLogger.info('Child logger message'); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].component).toBe('auth-service'); - expect(logs[0].version).toBe('1.2.3'); - expect(logs[0].msg).toBe('Child logger message'); - }); - - it('should support nested child loggers', () => { - const childLogger = logger.child({ level1: 'parent' }); - const grandChildLogger = childLogger.child({ level2: 'child' }); - - grandChildLogger.warn('Nested child message'); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].level1).toBe('parent'); - expect(logs[0].level2).toBe('child'); - expect(logs[0].level).toBe('warn'); - }); - - it('should merge child context with log metadata', () => { - const childLogger = logger.child({ service: 'api' }); - - childLogger.info('Request processed', { - requestId: 'req-789', - duration: 150 - }); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].service).toBe('api'); - expect(logs[0].requestId).toBe('req-789'); - expect(logs[0].duration).toBe(150); - }); - }); - - describe('Advanced Error Handling', () => { - it('should handle Error objects with custom properties', () => { - const customError = new Error('Custom error message'); - (customError as any).code = 'ERR_CUSTOM'; - (customError as any).statusCode = 500; - - logger.error('Custom error occurred', { error: customError }); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].level).toBe('error'); - expect(logs[0].msg).toBe('Custom error occurred'); - }); - - it('should handle multiple errors in 
metadata', () => { - const error1 = new Error('First error'); - const error2 = new Error('Second error'); - - logger.error('Multiple errors', { - primaryError: error1, - secondaryError: error2, - context: 'batch processing' - }); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].context).toBe('batch processing'); - }); - it('should handle error objects with circular references', () => { - const errorWithCircular: any = { name: 'CircularError', message: 'Circular reference error' }; - // Create a simple circular reference - errorWithCircular.self = errorWithCircular; - - // Should not throw when logging circular references - expect(() => { - logger.error('Circular error test', { error: errorWithCircular }); - }).not.toThrow(); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].level).toBe('error'); - - // Clean up circular reference to prevent memory issues - delete errorWithCircular.self; - }); - }); - describe('Performance and Edge Cases', () => { - it('should handle moderate metadata objects', () => { - const moderateMetadata: any = {}; - for (let i = 0; i < 10; i++) { - moderateMetadata[`key${i}`] = `value${i}`; - } - - logger.debug('Moderate metadata test', moderateMetadata); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].key0).toBe('value0'); - expect(logs[0].key9).toBe('value9'); - }); - - it('should handle special characters in messages', () => { - const specialMessage = 'Special chars: πŸš€ Γ± ΓΌ'; - - logger.info(specialMessage); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].msg).toBe(specialMessage); - }); - - it('should handle empty and whitespace-only messages', () => { - logger.info(''); - logger.info(' '); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(2); - expect(logs[0].msg).toBe(''); - 
expect(logs[1].msg).toBe(' '); - }); - }); -}); +/** + * Advanced Logger Tests + * + * Tests for advanced logger functionality including complex metadata handling, + * child loggers, and advanced error scenarios. + */ + +import { describe, it, expect, beforeEach, afterEach } from 'bun:test'; +import { Logger, shutdownLoggers } from '../src'; +import { loggerTestHelpers } from './setup'; + +describe('Advanced Logger Features', () => { + let logger: Logger; + let testLoggerInstance: ReturnType; + + beforeEach(() => { + testLoggerInstance = loggerTestHelpers.createTestLogger('advanced-features'); + logger = testLoggerInstance.logger; + }); afterEach(async () => { + testLoggerInstance.clearCapturedLogs(); + // Clear any global logger cache + await shutdownLoggers(); + }); + + describe('Complex Metadata Handling', () => { + it('should handle nested metadata objects', () => { + const complexMetadata = { + user: { id: '123', name: 'John Doe' }, + session: { id: 'sess-456', timeout: 3600 }, + request: { method: 'POST', path: '/api/test' } + }; + + logger.info('Complex operation', complexMetadata); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].user).toEqual({ id: '123', name: 'John Doe' }); + expect(logs[0].session).toEqual({ id: 'sess-456', timeout: 3600 }); + expect(logs[0].request).toEqual({ method: 'POST', path: '/api/test' }); + }); + + it('should handle arrays in metadata', () => { + const arrayMetadata = { + tags: ['user', 'authentication', 'success'], + ids: [1, 2, 3, 4] + }; + + logger.info('Array metadata test', arrayMetadata); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].tags).toEqual(['user', 'authentication', 'success']); + expect(logs[0].ids).toEqual([1, 2, 3, 4]); + }); + + it('should handle null and undefined metadata values', () => { + const nullMetadata = { + nullValue: null, + undefinedValue: undefined, + emptyString: '', + zeroValue: 0 + }; 
+ + logger.info('Null metadata test', nullMetadata); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].nullValue).toBe(null); + expect(logs[0].emptyString).toBe(''); + expect(logs[0].zeroValue).toBe(0); + }); + }); + + describe('Child Logger Functionality', () => { + it('should create child logger with additional context', () => { + const childLogger = logger.child({ + component: 'auth-service', + version: '1.2.3' + }); + + childLogger.info('Child logger message'); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].component).toBe('auth-service'); + expect(logs[0].version).toBe('1.2.3'); + expect(logs[0].msg).toBe('Child logger message'); + }); + + it('should support nested child loggers', () => { + const childLogger = logger.child({ level1: 'parent' }); + const grandChildLogger = childLogger.child({ level2: 'child' }); + + grandChildLogger.warn('Nested child message'); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].level1).toBe('parent'); + expect(logs[0].level2).toBe('child'); + expect(logs[0].level).toBe('warn'); + }); + + it('should merge child context with log metadata', () => { + const childLogger = logger.child({ service: 'api' }); + + childLogger.info('Request processed', { + requestId: 'req-789', + duration: 150 + }); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].service).toBe('api'); + expect(logs[0].requestId).toBe('req-789'); + expect(logs[0].duration).toBe(150); + }); + }); + + describe('Advanced Error Handling', () => { + it('should handle Error objects with custom properties', () => { + const customError = new Error('Custom error message'); + (customError as any).code = 'ERR_CUSTOM'; + (customError as any).statusCode = 500; + + logger.error('Custom error occurred', { error: customError }); + + const logs = 
testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].level).toBe('error'); + expect(logs[0].msg).toBe('Custom error occurred'); + }); + + it('should handle multiple errors in metadata', () => { + const error1 = new Error('First error'); + const error2 = new Error('Second error'); + + logger.error('Multiple errors', { + primaryError: error1, + secondaryError: error2, + context: 'batch processing' + }); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].context).toBe('batch processing'); + }); + it('should handle error objects with circular references', () => { + const errorWithCircular: any = { name: 'CircularError', message: 'Circular reference error' }; + // Create a simple circular reference + errorWithCircular.self = errorWithCircular; + + // Should not throw when logging circular references + expect(() => { + logger.error('Circular error test', { error: errorWithCircular }); + }).not.toThrow(); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].level).toBe('error'); + + // Clean up circular reference to prevent memory issues + delete errorWithCircular.self; + }); + }); + describe('Performance and Edge Cases', () => { + it('should handle moderate metadata objects', () => { + const moderateMetadata: any = {}; + for (let i = 0; i < 10; i++) { + moderateMetadata[`key${i}`] = `value${i}`; + } + + logger.debug('Moderate metadata test', moderateMetadata); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].key0).toBe('value0'); + expect(logs[0].key9).toBe('value9'); + }); + + it('should handle special characters in messages', () => { + const specialMessage = 'Special chars: πŸš€ Γ± ΓΌ'; + + logger.info(specialMessage); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].msg).toBe(specialMessage); + }); + + it('should handle empty and 
whitespace-only messages', () => { + logger.info(''); + logger.info(' '); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(2); + expect(logs[0].msg).toBe(''); + expect(logs[1].msg).toBe(' '); + }); + }); +}); diff --git a/libs/logger/test/basic.test.ts b/libs/logger/test/basic.test.ts index 7a68c11..5cbe34e 100644 --- a/libs/logger/test/basic.test.ts +++ b/libs/logger/test/basic.test.ts @@ -1,169 +1,169 @@ -/** - * Basic Logger Tests - * - * Tests for the core logger functionality and utilities. - */ - -import { describe, it, expect, beforeEach, afterEach } from 'bun:test'; -import { Logger, getLogger, shutdownLoggers } from '../src'; -import { loggerTestHelpers } from './setup'; - -describe('Basic Logger Tests', () => { - let logger: Logger; - let testLoggerInstance: ReturnType; - - beforeEach(() => { - testLoggerInstance = loggerTestHelpers.createTestLogger('utils-test'); - logger = testLoggerInstance.logger; - }); - afterEach(async () => { - testLoggerInstance.clearCapturedLogs(); - // Clear any global logger cache - await shutdownLoggers(); - }); - - describe('Logger Factory Functions', () => { - it('should create logger with getLogger', () => { - expect(typeof getLogger).toBe('function'); - - // Test that getLogger doesn't throw - expect(() => { - const anotherTestLoggerInstance = loggerTestHelpers.createTestLogger('factory-test'); - anotherTestLoggerInstance.logger.info('Factory test'); - }).not.toThrow(); - }); - }); - - describe('Logger Methods', () => { - it('should have all required logging methods', () => { - expect(typeof logger.debug).toBe('function'); - expect(typeof logger.info).toBe('function'); - expect(typeof logger.warn).toBe('function'); - expect(typeof logger.error).toBe('function'); - expect(typeof logger.child).toBe('function'); - }); - - it('should log with different message types', () => { - // String message - logger.info('String message'); - - // Object message - logger.info({ event: 'object_message', 
data: 'test' }); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(2); - expect(logs[0].msg).toBe('String message'); - expect(logs[1].level).toBe('info'); - }); - - it('should handle metadata correctly', () => { - const metadata = { - userId: 'user123', - sessionId: 'session456', - requestId: 'req789' - }; - - logger.info('Request processed', metadata); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].userId).toBe('user123'); - expect(logs[0].sessionId).toBe('session456'); - expect(logs[0].requestId).toBe('req789'); - }); - }); - - describe('Child Logger Functionality', () => { - it('should create child loggers with additional context', () => { - const childLogger = logger.child({ - module: 'payment', - version: '1.0.0' - }); - - childLogger.info('Payment processed'); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].msg).toBe('Payment processed'); - }); - - it('should inherit service name in child loggers', () => { - const childLogger = logger.child({ operation: 'test' }); - childLogger.info('Child operation'); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].service).toBe('utils-test'); - }); - }); - - describe('Error Normalization', () => { - it('should handle Error objects', () => { - const error = new Error('Test error'); - error.stack = 'Error stack trace'; - - logger.error('Error test', error); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].level).toBe('error'); - }); - - it('should handle error-like objects', () => { - const errorLike = { - name: 'ValidationError', - message: 'Invalid input', - code: 'VALIDATION_FAILED' - }; - - logger.error('Validation failed', { error: errorLike }); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].level).toBe('error'); - }); 
- - it('should handle primitive error values', () => { - logger.error('Simple error', { error: 'Error string' }); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].level).toBe('error'); - }); - }); - - describe('Service Context', () => { - it('should include service name in all logs', () => { - logger.debug('Debug message'); - logger.info('Info message'); - logger.warn('Warn message'); - logger.error('Error message'); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(4); - - logs.forEach(log => { - expect(log.service).toBe('utils-test'); - }); - }); - - it('should support different service names', () => { - const logger1Instance = loggerTestHelpers.createTestLogger('service-one'); - const logger2Instance = loggerTestHelpers.createTestLogger('service-two'); - - logger1Instance.logger.info('Message from service one'); - logger2Instance.logger.info('Message from service two'); - - // Since each logger instance has its own capture, we check them separately - // or combine them if that's the desired test logic. - // For this test, it seems we want to ensure they are separate. - const logs1 = logger1Instance.getCapturedLogs(); - expect(logs1.length).toBe(1); - expect(logs1[0].service).toBe('service-one'); - - const logs2 = logger2Instance.getCapturedLogs(); - expect(logs2.length).toBe(1); - expect(logs2[0].service).toBe('service-two'); - }); - }); -}); +/** + * Basic Logger Tests + * + * Tests for the core logger functionality and utilities. 
+ */ + +import { describe, it, expect, beforeEach, afterEach } from 'bun:test'; +import { Logger, getLogger, shutdownLoggers } from '../src'; +import { loggerTestHelpers } from './setup'; + +describe('Basic Logger Tests', () => { + let logger: Logger; + let testLoggerInstance: ReturnType; + + beforeEach(() => { + testLoggerInstance = loggerTestHelpers.createTestLogger('utils-test'); + logger = testLoggerInstance.logger; + }); + afterEach(async () => { + testLoggerInstance.clearCapturedLogs(); + // Clear any global logger cache + await shutdownLoggers(); + }); + + describe('Logger Factory Functions', () => { + it('should create logger with getLogger', () => { + expect(typeof getLogger).toBe('function'); + + // Test that getLogger doesn't throw + expect(() => { + const anotherTestLoggerInstance = loggerTestHelpers.createTestLogger('factory-test'); + anotherTestLoggerInstance.logger.info('Factory test'); + }).not.toThrow(); + }); + }); + + describe('Logger Methods', () => { + it('should have all required logging methods', () => { + expect(typeof logger.debug).toBe('function'); + expect(typeof logger.info).toBe('function'); + expect(typeof logger.warn).toBe('function'); + expect(typeof logger.error).toBe('function'); + expect(typeof logger.child).toBe('function'); + }); + + it('should log with different message types', () => { + // String message + logger.info('String message'); + + // Object message + logger.info({ event: 'object_message', data: 'test' }); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(2); + expect(logs[0].msg).toBe('String message'); + expect(logs[1].level).toBe('info'); + }); + + it('should handle metadata correctly', () => { + const metadata = { + userId: 'user123', + sessionId: 'session456', + requestId: 'req789' + }; + + logger.info('Request processed', metadata); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].userId).toBe('user123'); + 
expect(logs[0].sessionId).toBe('session456'); + expect(logs[0].requestId).toBe('req789'); + }); + }); + + describe('Child Logger Functionality', () => { + it('should create child loggers with additional context', () => { + const childLogger = logger.child({ + module: 'payment', + version: '1.0.0' + }); + + childLogger.info('Payment processed'); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].msg).toBe('Payment processed'); + }); + + it('should inherit service name in child loggers', () => { + const childLogger = logger.child({ operation: 'test' }); + childLogger.info('Child operation'); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].service).toBe('utils-test'); + }); + }); + + describe('Error Normalization', () => { + it('should handle Error objects', () => { + const error = new Error('Test error'); + error.stack = 'Error stack trace'; + + logger.error('Error test', error); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].level).toBe('error'); + }); + + it('should handle error-like objects', () => { + const errorLike = { + name: 'ValidationError', + message: 'Invalid input', + code: 'VALIDATION_FAILED' + }; + + logger.error('Validation failed', { error: errorLike }); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].level).toBe('error'); + }); + + it('should handle primitive error values', () => { + logger.error('Simple error', { error: 'Error string' }); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].level).toBe('error'); + }); + }); + + describe('Service Context', () => { + it('should include service name in all logs', () => { + logger.debug('Debug message'); + logger.info('Info message'); + logger.warn('Warn message'); + logger.error('Error message'); + + const logs = 
testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(4); + + logs.forEach(log => { + expect(log.service).toBe('utils-test'); + }); + }); + + it('should support different service names', () => { + const logger1Instance = loggerTestHelpers.createTestLogger('service-one'); + const logger2Instance = loggerTestHelpers.createTestLogger('service-two'); + + logger1Instance.logger.info('Message from service one'); + logger2Instance.logger.info('Message from service two'); + + // Since each logger instance has its own capture, we check them separately + // or combine them if that's the desired test logic. + // For this test, it seems we want to ensure they are separate. + const logs1 = logger1Instance.getCapturedLogs(); + expect(logs1.length).toBe(1); + expect(logs1[0].service).toBe('service-one'); + + const logs2 = logger2Instance.getCapturedLogs(); + expect(logs2.length).toBe(1); + expect(logs2[0].service).toBe('service-two'); + }); + }); +}); diff --git a/libs/logger/test/integration.test.ts b/libs/logger/test/integration.test.ts index 0204167..ac6cc35 100644 --- a/libs/logger/test/integration.test.ts +++ b/libs/logger/test/integration.test.ts @@ -1,192 +1,192 @@ -/** - * Logger Integration Tests - * - * Tests the core functionality of the simplified @stock-bot/logger package. 
- */ - -import { describe, it, expect, beforeEach, afterEach } from 'bun:test'; -import { - Logger, - getLogger, - shutdownLoggers -} from '../src'; -import { loggerTestHelpers } from './setup'; - -describe('Logger Integration Tests', () => { - let logger: Logger; - let testLoggerInstance: ReturnType; - - beforeEach(() => { - testLoggerInstance = loggerTestHelpers.createTestLogger('integration-test'); - logger = testLoggerInstance.logger; - }); - afterEach(async () => { - testLoggerInstance.clearCapturedLogs(); - // Clear any global logger cache - await shutdownLoggers(); - }); - - describe('Core Logger Functionality', () => { - it('should log messages at different levels', () => { - // Test multiple log levels - logger.debug('Debug message'); - logger.info('Info message'); - logger.warn('Warning message'); - logger.error('Error message'); - - // Get captured logs - const logs = testLoggerInstance.getCapturedLogs(); - - // Verify logs were captured - expect(logs.length).toBe(4); - expect(logs[0].level).toBe('debug'); - expect(logs[0].msg).toBe('Debug message'); - expect(logs[1].level).toBe('info'); - expect(logs[1].msg).toBe('Info message'); - expect(logs[2].level).toBe('warn'); - expect(logs[2].msg).toBe('Warning message'); - expect(logs[3].level).toBe('error'); - expect(logs[3].msg).toBe('Error message'); - }); - - it('should log objects as structured logs', () => { - // Log an object - logger.info('User logged in', { userId: '123', action: 'login' }); - - // Get captured logs - const logs = testLoggerInstance.getCapturedLogs(); - - // Verify structured log - expect(logs.length).toBe(1); - expect(logs[0].userId).toBe('123'); - expect(logs[0].action).toBe('login'); - expect(logs[0].msg).toBe('User logged in'); - }); - - it('should handle error objects in error logs', () => { - const testError = new Error('Test error message'); - - // Log error with error object - logger.error('Something went wrong', { error: testError }); - - // Get captured logs - const logs = 
testLoggerInstance.getCapturedLogs(); - - // Verify error was logged - expect(logs.length).toBe(1); - expect(logs[0].level).toBe('error'); - expect(logs[0].msg).toBe('Something went wrong'); - }); - - it('should create child loggers with additional context', () => { - // Create a child logger with additional context - const childLogger = logger.child({ - transactionId: 'tx-789', - operation: 'payment' - }); - - // Log with child logger - childLogger.info('Child logger test'); - - // Get captured logs - const logs = testLoggerInstance.getCapturedLogs(); - - // Verify child logger logged something - expect(logs.length).toBe(1); - expect(logs[0].msg).toBe('Child logger test'); - }); - }); - - describe('Factory Functions', () => { - it('should export factory functions', () => { - // Verify that the factory functions are exported and callable - expect(typeof getLogger).toBe('function'); - }); - - it('should create different logger instances', () => { - const logger1Instance = loggerTestHelpers.createTestLogger('service-1'); - const logger2Instance = loggerTestHelpers.createTestLogger('service-2'); - - logger1Instance.logger.info('Message from service 1'); - logger2Instance.logger.info('Message from service 2'); - - const logs1 = logger1Instance.getCapturedLogs(); - expect(logs1.length).toBe(1); - expect(logs1[0].service).toBe('service-1'); - - const logs2 = logger2Instance.getCapturedLogs(); - expect(logs2.length).toBe(1); - expect(logs2[0].service).toBe('service-2'); - }); - }); - - describe('Error Handling', () => { - it('should normalize Error objects', () => { - const error = new Error('Test error'); - error.stack = 'Error stack trace'; - - logger.error('Error occurred', error); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].level).toBe('error'); - expect(logs[0].msg).toBe('Error occurred'); - }); - - it('should handle error-like objects', () => { - const errorLike = { - name: 'CustomError', - message: 'Custom 
error message', - code: 'ERR_CUSTOM' - }; - - logger.error('Custom error occurred', { error: errorLike }); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].level).toBe('error'); - expect(logs[0].msg).toBe('Custom error occurred'); - }); - - it('should handle primitive error values', () => { - logger.error('String error occurred', { error: 'Simple string error' }); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].level).toBe('error'); - expect(logs[0].msg).toBe('String error occurred'); - }); - }); - - describe('Metadata Handling', () => { - it('should include metadata in logs', () => { - const metadata = { - requestId: 'req-123', - userId: 'user-456', - operation: 'data-fetch' - }; - - logger.info('Operation completed', metadata); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].requestId).toBe('req-123'); - expect(logs[0].userId).toBe('user-456'); - expect(logs[0].operation).toBe('data-fetch'); - }); - - it('should handle object messages', () => { - const objectMessage = { - event: 'user_action', - action: 'login', - timestamp: Date.now() - }; - - logger.info(objectMessage); - - const logs = testLoggerInstance.getCapturedLogs(); - expect(logs.length).toBe(1); - expect(logs[0].level).toBe('info'); - }); - }); -}); +/** + * Logger Integration Tests + * + * Tests the core functionality of the simplified @stock-bot/logger package. 
+ */ + +import { describe, it, expect, beforeEach, afterEach } from 'bun:test'; +import { + Logger, + getLogger, + shutdownLoggers +} from '../src'; +import { loggerTestHelpers } from './setup'; + +describe('Logger Integration Tests', () => { + let logger: Logger; + let testLoggerInstance: ReturnType; + + beforeEach(() => { + testLoggerInstance = loggerTestHelpers.createTestLogger('integration-test'); + logger = testLoggerInstance.logger; + }); + afterEach(async () => { + testLoggerInstance.clearCapturedLogs(); + // Clear any global logger cache + await shutdownLoggers(); + }); + + describe('Core Logger Functionality', () => { + it('should log messages at different levels', () => { + // Test multiple log levels + logger.debug('Debug message'); + logger.info('Info message'); + logger.warn('Warning message'); + logger.error('Error message'); + + // Get captured logs + const logs = testLoggerInstance.getCapturedLogs(); + + // Verify logs were captured + expect(logs.length).toBe(4); + expect(logs[0].level).toBe('debug'); + expect(logs[0].msg).toBe('Debug message'); + expect(logs[1].level).toBe('info'); + expect(logs[1].msg).toBe('Info message'); + expect(logs[2].level).toBe('warn'); + expect(logs[2].msg).toBe('Warning message'); + expect(logs[3].level).toBe('error'); + expect(logs[3].msg).toBe('Error message'); + }); + + it('should log objects as structured logs', () => { + // Log an object + logger.info('User logged in', { userId: '123', action: 'login' }); + + // Get captured logs + const logs = testLoggerInstance.getCapturedLogs(); + + // Verify structured log + expect(logs.length).toBe(1); + expect(logs[0].userId).toBe('123'); + expect(logs[0].action).toBe('login'); + expect(logs[0].msg).toBe('User logged in'); + }); + + it('should handle error objects in error logs', () => { + const testError = new Error('Test error message'); + + // Log error with error object + logger.error('Something went wrong', { error: testError }); + + // Get captured logs + const logs = 
testLoggerInstance.getCapturedLogs(); + + // Verify error was logged + expect(logs.length).toBe(1); + expect(logs[0].level).toBe('error'); + expect(logs[0].msg).toBe('Something went wrong'); + }); + + it('should create child loggers with additional context', () => { + // Create a child logger with additional context + const childLogger = logger.child({ + transactionId: 'tx-789', + operation: 'payment' + }); + + // Log with child logger + childLogger.info('Child logger test'); + + // Get captured logs + const logs = testLoggerInstance.getCapturedLogs(); + + // Verify child logger logged something + expect(logs.length).toBe(1); + expect(logs[0].msg).toBe('Child logger test'); + }); + }); + + describe('Factory Functions', () => { + it('should export factory functions', () => { + // Verify that the factory functions are exported and callable + expect(typeof getLogger).toBe('function'); + }); + + it('should create different logger instances', () => { + const logger1Instance = loggerTestHelpers.createTestLogger('service-1'); + const logger2Instance = loggerTestHelpers.createTestLogger('service-2'); + + logger1Instance.logger.info('Message from service 1'); + logger2Instance.logger.info('Message from service 2'); + + const logs1 = logger1Instance.getCapturedLogs(); + expect(logs1.length).toBe(1); + expect(logs1[0].service).toBe('service-1'); + + const logs2 = logger2Instance.getCapturedLogs(); + expect(logs2.length).toBe(1); + expect(logs2[0].service).toBe('service-2'); + }); + }); + + describe('Error Handling', () => { + it('should normalize Error objects', () => { + const error = new Error('Test error'); + error.stack = 'Error stack trace'; + + logger.error('Error occurred', error); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].level).toBe('error'); + expect(logs[0].msg).toBe('Error occurred'); + }); + + it('should handle error-like objects', () => { + const errorLike = { + name: 'CustomError', + message: 'Custom 
error message', + code: 'ERR_CUSTOM' + }; + + logger.error('Custom error occurred', { error: errorLike }); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].level).toBe('error'); + expect(logs[0].msg).toBe('Custom error occurred'); + }); + + it('should handle primitive error values', () => { + logger.error('String error occurred', { error: 'Simple string error' }); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].level).toBe('error'); + expect(logs[0].msg).toBe('String error occurred'); + }); + }); + + describe('Metadata Handling', () => { + it('should include metadata in logs', () => { + const metadata = { + requestId: 'req-123', + userId: 'user-456', + operation: 'data-fetch' + }; + + logger.info('Operation completed', metadata); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].requestId).toBe('req-123'); + expect(logs[0].userId).toBe('user-456'); + expect(logs[0].operation).toBe('data-fetch'); + }); + + it('should handle object messages', () => { + const objectMessage = { + event: 'user_action', + action: 'login', + timestamp: Date.now() + }; + + logger.info(objectMessage); + + const logs = testLoggerInstance.getCapturedLogs(); + expect(logs.length).toBe(1); + expect(logs[0].level).toBe('info'); + }); + }); +}); diff --git a/libs/logger/test/setup.ts b/libs/logger/test/setup.ts index f040472..3601a4a 100644 --- a/libs/logger/test/setup.ts +++ b/libs/logger/test/setup.ts @@ -1,137 +1,137 @@ -/** - * Logger Test Setup - * - * Setup file specific to Logger library tests. - * Provides utilities and mocks for testing logging operations. 
- */ - -import { Logger, LogMetadata, shutdownLoggers } from '../src'; -import { afterAll, afterEach, beforeAll, beforeEach } from 'bun:test'; - -// Store original console methods -const originalConsole = { - log: console.log, - info: console.info, - warn: console.warn, - error: console.error, - debug: console.debug -}; - -// Create a test logger helper -export const loggerTestHelpers = { - - - /** - * Mock Loki transport - */ - mockLokiTransport: () => ({ - on: () => {}, - write: () => {} - }), - /** - * Create a mock Hono context for middleware tests - */ createHonoContextMock: (options: any = {}) => { - // Default path and method - const path = options.path || '/test'; - const method = options.method || 'GET'; - - // Create request headers - const headerEntries = Object.entries(options.req?.headers || {}); - const headerMap = new Map(headerEntries); - const rawHeaders = new Headers(); - headerEntries.forEach(([key, value]) => rawHeaders.set(key, value as string)); - - // Create request with standard properties needed for middleware - const req = { - method, - url: `http://localhost${path}`, - path, - raw: { - url: `http://localhost${path}`, - method, - headers: rawHeaders - }, - query: {}, - param: () => undefined, - header: (name: string) => rawHeaders.get(name.toLowerCase()), - headers: headerMap, - ...options.req - }; - - // Create mock response - const res = { - status: 200, - statusText: 'OK', - body: null, - headers: new Map(), - clone: function() { return { ...this, text: async () => JSON.stringify(this.body) }; }, - text: async () => JSON.stringify(res.body), - ...options.res - }; - - // Create context with all required Hono methods - const c: any = { - req, - env: {}, - res, - header: (name: string, value: string) => { - c.res.headers.set(name.toLowerCase(), value); - return c; - }, - get: (key: string) => c[key], - set: (key: string, value: any) => { c[key] = value; return c; }, - status: (code: number) => { c.res.status = code; return c; }, - json: 
(body: any) => { c.res.body = body; return c; }, - executionCtx: { waitUntil: (fn: Function) => { fn(); } } - }; - - return c; - }, - - /** - * Create a mock Next function for middleware tests - */ - createNextMock: () => { - return async () => { - // Do nothing, simulate middleware completion - return; - }; - } -}; - -// Setup environment before tests -beforeAll(() => { - // Don't let real logs through during tests - console.log = () => {}; - console.info = () => {}; - console.warn = () => {}; - console.error = () => {}; - console.debug = () => {}; - - // Override NODE_ENV for tests - process.env.NODE_ENV = 'test'; - - // Disable real logging during tests - process.env.LOG_LEVEL = 'silent'; - process.env.LOG_CONSOLE = 'false'; - process.env.LOG_FILE = 'false'; - - // Mock Loki config to prevent real connections - process.env.LOKI_HOST = ''; - process.env.LOKI_URL = ''; -}); - -// Clean up after each test -afterEach(async () => { - // Clear logger cache to prevent state pollution between tests - await shutdownLoggers(); -}); - -// Restore everything after tests -afterAll(() => { - console.log = originalConsole.log; - console.info = originalConsole.info; - console.warn = originalConsole.warn; - console.error = originalConsole.error; - console.debug = originalConsole.debug; -}); +/** + * Logger Test Setup + * + * Setup file specific to Logger library tests. + * Provides utilities and mocks for testing logging operations. 
+ */ + +import { Logger, LogMetadata, shutdownLoggers } from '../src'; +import { afterAll, afterEach, beforeAll, beforeEach } from 'bun:test'; + +// Store original console methods +const originalConsole = { + log: console.log, + info: console.info, + warn: console.warn, + error: console.error, + debug: console.debug +}; + +// Create a test logger helper +export const loggerTestHelpers = { + + + /** + * Mock Loki transport + */ + mockLokiTransport: () => ({ + on: () => {}, + write: () => {} + }), + /** + * Create a mock Hono context for middleware tests + */ createHonoContextMock: (options: any = {}) => { + // Default path and method + const path = options.path || '/test'; + const method = options.method || 'GET'; + + // Create request headers + const headerEntries = Object.entries(options.req?.headers || {}); + const headerMap = new Map(headerEntries); + const rawHeaders = new Headers(); + headerEntries.forEach(([key, value]) => rawHeaders.set(key, value as string)); + + // Create request with standard properties needed for middleware + const req = { + method, + url: `http://localhost${path}`, + path, + raw: { + url: `http://localhost${path}`, + method, + headers: rawHeaders + }, + query: {}, + param: () => undefined, + header: (name: string) => rawHeaders.get(name.toLowerCase()), + headers: headerMap, + ...options.req + }; + + // Create mock response + const res = { + status: 200, + statusText: 'OK', + body: null, + headers: new Map(), + clone: function() { return { ...this, text: async () => JSON.stringify(this.body) }; }, + text: async () => JSON.stringify(res.body), + ...options.res + }; + + // Create context with all required Hono methods + const c: any = { + req, + env: {}, + res, + header: (name: string, value: string) => { + c.res.headers.set(name.toLowerCase(), value); + return c; + }, + get: (key: string) => c[key], + set: (key: string, value: any) => { c[key] = value; return c; }, + status: (code: number) => { c.res.status = code; return c; }, + json: 
(body: any) => { c.res.body = body; return c; }, + executionCtx: { waitUntil: (fn: Function) => { fn(); } } + }; + + return c; + }, + + /** + * Create a mock Next function for middleware tests + */ + createNextMock: () => { + return async () => { + // Do nothing, simulate middleware completion + return; + }; + } +}; + +// Setup environment before tests +beforeAll(() => { + // Don't let real logs through during tests + console.log = () => {}; + console.info = () => {}; + console.warn = () => {}; + console.error = () => {}; + console.debug = () => {}; + + // Override NODE_ENV for tests + process.env.NODE_ENV = 'test'; + + // Disable real logging during tests + process.env.LOG_LEVEL = 'silent'; + process.env.LOG_CONSOLE = 'false'; + process.env.LOG_FILE = 'false'; + + // Mock Loki config to prevent real connections + process.env.LOKI_HOST = ''; + process.env.LOKI_URL = ''; +}); + +// Clean up after each test +afterEach(async () => { + // Clear logger cache to prevent state pollution between tests + await shutdownLoggers(); +}); + +// Restore everything after tests +afterAll(() => { + console.log = originalConsole.log; + console.info = originalConsole.info; + console.warn = originalConsole.warn; + console.error = originalConsole.error; + console.debug = originalConsole.debug; +}); diff --git a/libs/logger/tsconfig.json b/libs/logger/tsconfig.json index 3e8f42b..3bf8063 100644 --- a/libs/logger/tsconfig.json +++ b/libs/logger/tsconfig.json @@ -1,12 +1,12 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "references": [ - { "path": "../types" }, - { "path": "../config" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../config" } + ] +} diff --git a/libs/logger/turbo.json b/libs/logger/turbo.json index 9dbb1ee..9d8964a 100644 --- 
a/libs/logger/turbo.json +++ b/libs/logger/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/mongodb-client/README.md b/libs/mongodb-client/README.md index 42e884a..204df2d 100644 --- a/libs/mongodb-client/README.md +++ b/libs/mongodb-client/README.md @@ -1,72 +1,72 @@ -# MongoDB Client Library - -A comprehensive MongoDB client library for the Stock Bot trading platform, designed for handling document storage, raw data, and unstructured content. 
- -## Features - -- **Connection Management**: Robust connection pooling and failover -- **Schema Validation**: Built-in validation using Zod schemas -- **Type Safety**: Full TypeScript support with typed collections -- **Error Handling**: Comprehensive error handling and retry logic -- **Health Monitoring**: Connection health monitoring and metrics -- **Transactions**: Support for multi-document transactions -- **Aggregation**: Helper methods for complex aggregation pipelines - -## Usage - -```typescript -import { MongoDBClient } from '@stock-bot/mongodb-client'; - -// Initialize client -const mongoClient = new MongoDBClient(); -await mongoClient.connect(); - -// Get a typed collection -const collection = mongoClient.getCollection('sentiment_data'); - -// Insert document -await collection.insertOne({ - symbol: 'AAPL', - sentiment: 'positive', - source: 'reddit', - timestamp: new Date() -}); - -// Query with aggregation -const results = await collection.aggregate([ - { $match: { symbol: 'AAPL' } }, - { $group: { _id: '$sentiment', count: { $sum: 1 } } } -]); -``` - -## Collections - -The client provides typed access to the following collections: - -- **sentiment_data**: Social media sentiment analysis -- **raw_documents**: Unprocessed documents and content -- **news_articles**: Financial news and articles -- **sec_filings**: SEC filing documents -- **earnings_transcripts**: Earnings call transcripts -- **analyst_reports**: Research reports and analysis - -## Configuration - -Configure using environment variables: - -```env -MONGODB_HOST=localhost -MONGODB_PORT=27017 -MONGODB_DATABASE=trading_documents -MONGODB_USERNAME=trading_admin -MONGODB_PASSWORD=your_password -``` - -## Health Monitoring - -The client includes built-in health monitoring: - -```typescript -const health = await mongoClient.getHealth(); -console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy' -``` +# MongoDB Client Library + +A comprehensive MongoDB client library for the Stock Bot 
trading platform, designed for handling document storage, raw data, and unstructured content. + +## Features + +- **Connection Management**: Robust connection pooling and failover +- **Schema Validation**: Built-in validation using Zod schemas +- **Type Safety**: Full TypeScript support with typed collections +- **Error Handling**: Comprehensive error handling and retry logic +- **Health Monitoring**: Connection health monitoring and metrics +- **Transactions**: Support for multi-document transactions +- **Aggregation**: Helper methods for complex aggregation pipelines + +## Usage + +```typescript +import { MongoDBClient } from '@stock-bot/mongodb-client'; + +// Initialize client +const mongoClient = new MongoDBClient(); +await mongoClient.connect(); + +// Get a typed collection +const collection = mongoClient.getCollection('sentiment_data'); + +// Insert document +await collection.insertOne({ + symbol: 'AAPL', + sentiment: 'positive', + source: 'reddit', + timestamp: new Date() +}); + +// Query with aggregation +const results = await collection.aggregate([ + { $match: { symbol: 'AAPL' } }, + { $group: { _id: '$sentiment', count: { $sum: 1 } } } +]); +``` + +## Collections + +The client provides typed access to the following collections: + +- **sentiment_data**: Social media sentiment analysis +- **raw_documents**: Unprocessed documents and content +- **news_articles**: Financial news and articles +- **sec_filings**: SEC filing documents +- **earnings_transcripts**: Earnings call transcripts +- **analyst_reports**: Research reports and analysis + +## Configuration + +Configure using environment variables: + +```env +MONGODB_HOST=localhost +MONGODB_PORT=27017 +MONGODB_DATABASE=trading_documents +MONGODB_USERNAME=trading_admin +MONGODB_PASSWORD=your_password +``` + +## Health Monitoring + +The client includes built-in health monitoring: + +```typescript +const health = await mongoClient.getHealth(); +console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy' 
+``` diff --git a/libs/mongodb-client/package.json b/libs/mongodb-client/package.json index 2f2a942..a0ad076 100644 --- a/libs/mongodb-client/package.json +++ b/libs/mongodb-client/package.json @@ -1,51 +1,51 @@ -{ - "name": "@stock-bot/mongodb-client", - "version": "1.0.0", - "description": "MongoDB client library for Stock Bot platform", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "test": "bun test", - "lint": "eslint src/**/*.ts", - "type-check": "tsc --noEmit", - "clean": "rimraf dist" - }, - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "@types/mongodb": "^4.0.7", - "mongodb": "^6.17.0", - "yup": "^1.6.1" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "typescript": "^5.3.0", - "eslint": "^8.56.0", - "@typescript-eslint/eslint-plugin": "^6.19.0", - "@typescript-eslint/parser": "^6.19.0", - "bun-types": "^1.2.15" - }, - "keywords": [ - "mongodb", - "database", - "client", - "stock-bot" - ], - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ], - "paths": { - "*": ["node_modules/*", "../../node_modules/*"] - } -} +{ + "name": "@stock-bot/mongodb-client", + "version": "1.0.0", + "description": "MongoDB client library for Stock Bot platform", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsc", + "test": "bun test", + "lint": "eslint src/**/*.ts", + "type-check": "tsc --noEmit", + "clean": "rimraf dist" + }, + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "@types/mongodb": "^4.0.7", + "mongodb": "^6.17.0", + "yup": "^1.6.1" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "typescript": "^5.3.0", + "eslint": "^8.56.0", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": 
"^6.19.0", + "bun-types": "^1.2.15" + }, + "keywords": [ + "mongodb", + "database", + "client", + "stock-bot" + ], + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist", + "README.md" + ], + "paths": { + "*": ["node_modules/*", "../../node_modules/*"] + } +} diff --git a/libs/mongodb-client/src/aggregation.ts b/libs/mongodb-client/src/aggregation.ts index f5dac02..38f7acf 100644 --- a/libs/mongodb-client/src/aggregation.ts +++ b/libs/mongodb-client/src/aggregation.ts @@ -1,247 +1,247 @@ -import type { Document } from 'mongodb'; -import type { MongoDBClient } from './client'; -import type { CollectionNames } from './types'; - -/** - * MongoDB Aggregation Builder - * - * Provides a fluent interface for building MongoDB aggregation pipelines - */ -export class MongoDBAggregationBuilder { - private pipeline: any[] = []; - private readonly client: MongoDBClient; - private collection: CollectionNames | null = null; - - constructor(client: MongoDBClient) { - this.client = client; - } - - /** - * Set the collection to aggregate on - */ - from(collection: CollectionNames): this { - this.collection = collection; - return this; - } - - /** - * Add a match stage - */ - match(filter: any): this { - this.pipeline.push({ $match: filter }); - return this; - } - - /** - * Add a group stage - */ - group(groupBy: any): this { - this.pipeline.push({ $group: groupBy }); - return this; - } - - /** - * Add a sort stage - */ - sort(sortBy: any): this { - this.pipeline.push({ $sort: sortBy }); - return this; - } - - /** - * Add a limit stage - */ - limit(count: number): this { - this.pipeline.push({ $limit: count }); - return this; - } - - /** - * Add a skip stage - */ - skip(count: number): this { - this.pipeline.push({ $skip: count }); - return this; - } - - /** - * Add a project stage - */ - project(projection: any): this { - this.pipeline.push({ $project: projection }); - return this; - } - - 
/** - * Add an unwind stage - */ - unwind(field: string, options?: any): this { - this.pipeline.push({ - $unwind: options ? { path: field, ...options } : field - }); - return this; - } - - /** - * Add a lookup stage (join) - */ - lookup(from: string, localField: string, foreignField: string, as: string): this { - this.pipeline.push({ - $lookup: { - from, - localField, - foreignField, - as - } - }); - return this; - } - - /** - * Add a custom stage - */ - addStage(stage: any): this { - this.pipeline.push(stage); - return this; - } - /** - * Execute the aggregation pipeline - */ - async execute(): Promise { - if (!this.collection) { - throw new Error('Collection not specified. Use .from() to set the collection.'); - } - - const collection = this.client.getCollection(this.collection); - return await collection.aggregate(this.pipeline).toArray(); - } - - /** - * Get the pipeline array - */ - getPipeline(): any[] { - return [...this.pipeline]; - } - - /** - * Reset the pipeline - */ - reset(): this { - this.pipeline = []; - this.collection = null; - return this; - } - - // Convenience methods for common aggregations - - /** - * Sentiment analysis aggregation - */ - sentimentAnalysis(symbol?: string, timeframe?: { start: Date; end: Date }): this { - this.from('sentiment_data'); - - const matchConditions: any = {}; - if (symbol) matchConditions.symbol = symbol; - if (timeframe) { - matchConditions.timestamp = { - $gte: timeframe.start, - $lte: timeframe.end - }; - } - - if (Object.keys(matchConditions).length > 0) { - this.match(matchConditions); - } - - return this.group({ - _id: { - symbol: '$symbol', - sentiment: '$sentiment_label' - }, - count: { $sum: 1 }, - avgScore: { $avg: '$sentiment_score' }, - avgConfidence: { $avg: '$confidence' } - }); - } - - /** - * News article aggregation by publication - */ - newsByPublication(symbols?: string[]): this { - this.from('news_articles'); - - if (symbols && symbols.length > 0) { - this.match({ symbols: { $in: symbols } }); - 
} - - return this.group({ - _id: '$publication', - articleCount: { $sum: 1 }, - symbols: { $addToSet: '$symbols' }, - avgSentiment: { $avg: '$sentiment_score' }, - latestArticle: { $max: '$published_date' } - }); - } - - /** - * SEC filings by company - */ - secFilingsByCompany(filingTypes?: string[]): this { - this.from('sec_filings'); - - if (filingTypes && filingTypes.length > 0) { - this.match({ filing_type: { $in: filingTypes } }); - } - - return this.group({ - _id: { - cik: '$cik', - company: '$company_name' - }, - filingCount: { $sum: 1 }, - filingTypes: { $addToSet: '$filing_type' }, - latestFiling: { $max: '$filing_date' }, - symbols: { $addToSet: '$symbols' } - }); - } - - /** - * Document processing status summary - */ - processingStatusSummary(collection: CollectionNames): this { - this.from(collection); - - return this.group({ - _id: '$processing_status', - count: { $sum: 1 }, - avgSizeBytes: { $avg: '$size_bytes' }, - oldestDocument: { $min: '$created_at' }, - newestDocument: { $max: '$created_at' } - }); - } - - /** - * Time-based aggregation (daily/hourly counts) - */ - timeBasedCounts( - collection: CollectionNames, - dateField: string = 'created_at', - interval: 'hour' | 'day' | 'week' | 'month' = 'day' - ): this { - this.from(collection); - - const dateFormat = { - hour: { $dateToString: { format: '%Y-%m-%d %H:00:00', date: `$${dateField}` } }, - day: { $dateToString: { format: '%Y-%m-%d', date: `$${dateField}` } }, - week: { $dateToString: { format: '%Y-W%V', date: `$${dateField}` } }, - month: { $dateToString: { format: '%Y-%m', date: `$${dateField}` } } - }; - - return this.group({ - _id: dateFormat[interval], - count: { $sum: 1 }, - firstDocument: { $min: `$${dateField}` }, - lastDocument: { $max: `$${dateField}` } - }).sort({ _id: 1 }); - } -} +import type { Document } from 'mongodb'; +import type { MongoDBClient } from './client'; +import type { CollectionNames } from './types'; + +/** + * MongoDB Aggregation Builder + * + * Provides a 
fluent interface for building MongoDB aggregation pipelines + */ +export class MongoDBAggregationBuilder { + private pipeline: any[] = []; + private readonly client: MongoDBClient; + private collection: CollectionNames | null = null; + + constructor(client: MongoDBClient) { + this.client = client; + } + + /** + * Set the collection to aggregate on + */ + from(collection: CollectionNames): this { + this.collection = collection; + return this; + } + + /** + * Add a match stage + */ + match(filter: any): this { + this.pipeline.push({ $match: filter }); + return this; + } + + /** + * Add a group stage + */ + group(groupBy: any): this { + this.pipeline.push({ $group: groupBy }); + return this; + } + + /** + * Add a sort stage + */ + sort(sortBy: any): this { + this.pipeline.push({ $sort: sortBy }); + return this; + } + + /** + * Add a limit stage + */ + limit(count: number): this { + this.pipeline.push({ $limit: count }); + return this; + } + + /** + * Add a skip stage + */ + skip(count: number): this { + this.pipeline.push({ $skip: count }); + return this; + } + + /** + * Add a project stage + */ + project(projection: any): this { + this.pipeline.push({ $project: projection }); + return this; + } + + /** + * Add an unwind stage + */ + unwind(field: string, options?: any): this { + this.pipeline.push({ + $unwind: options ? { path: field, ...options } : field + }); + return this; + } + + /** + * Add a lookup stage (join) + */ + lookup(from: string, localField: string, foreignField: string, as: string): this { + this.pipeline.push({ + $lookup: { + from, + localField, + foreignField, + as + } + }); + return this; + } + + /** + * Add a custom stage + */ + addStage(stage: any): this { + this.pipeline.push(stage); + return this; + } + /** + * Execute the aggregation pipeline + */ + async execute(): Promise { + if (!this.collection) { + throw new Error('Collection not specified. 
Use .from() to set the collection.'); + } + + const collection = this.client.getCollection(this.collection); + return await collection.aggregate(this.pipeline).toArray(); + } + + /** + * Get the pipeline array + */ + getPipeline(): any[] { + return [...this.pipeline]; + } + + /** + * Reset the pipeline + */ + reset(): this { + this.pipeline = []; + this.collection = null; + return this; + } + + // Convenience methods for common aggregations + + /** + * Sentiment analysis aggregation + */ + sentimentAnalysis(symbol?: string, timeframe?: { start: Date; end: Date }): this { + this.from('sentiment_data'); + + const matchConditions: any = {}; + if (symbol) matchConditions.symbol = symbol; + if (timeframe) { + matchConditions.timestamp = { + $gte: timeframe.start, + $lte: timeframe.end + }; + } + + if (Object.keys(matchConditions).length > 0) { + this.match(matchConditions); + } + + return this.group({ + _id: { + symbol: '$symbol', + sentiment: '$sentiment_label' + }, + count: { $sum: 1 }, + avgScore: { $avg: '$sentiment_score' }, + avgConfidence: { $avg: '$confidence' } + }); + } + + /** + * News article aggregation by publication + */ + newsByPublication(symbols?: string[]): this { + this.from('news_articles'); + + if (symbols && symbols.length > 0) { + this.match({ symbols: { $in: symbols } }); + } + + return this.group({ + _id: '$publication', + articleCount: { $sum: 1 }, + symbols: { $addToSet: '$symbols' }, + avgSentiment: { $avg: '$sentiment_score' }, + latestArticle: { $max: '$published_date' } + }); + } + + /** + * SEC filings by company + */ + secFilingsByCompany(filingTypes?: string[]): this { + this.from('sec_filings'); + + if (filingTypes && filingTypes.length > 0) { + this.match({ filing_type: { $in: filingTypes } }); + } + + return this.group({ + _id: { + cik: '$cik', + company: '$company_name' + }, + filingCount: { $sum: 1 }, + filingTypes: { $addToSet: '$filing_type' }, + latestFiling: { $max: '$filing_date' }, + symbols: { $addToSet: '$symbols' } + }); 
+ } + + /** + * Document processing status summary + */ + processingStatusSummary(collection: CollectionNames): this { + this.from(collection); + + return this.group({ + _id: '$processing_status', + count: { $sum: 1 }, + avgSizeBytes: { $avg: '$size_bytes' }, + oldestDocument: { $min: '$created_at' }, + newestDocument: { $max: '$created_at' } + }); + } + + /** + * Time-based aggregation (daily/hourly counts) + */ + timeBasedCounts( + collection: CollectionNames, + dateField: string = 'created_at', + interval: 'hour' | 'day' | 'week' | 'month' = 'day' + ): this { + this.from(collection); + + const dateFormat = { + hour: { $dateToString: { format: '%Y-%m-%d %H:00:00', date: `$${dateField}` } }, + day: { $dateToString: { format: '%Y-%m-%d', date: `$${dateField}` } }, + week: { $dateToString: { format: '%Y-W%V', date: `$${dateField}` } }, + month: { $dateToString: { format: '%Y-%m', date: `$${dateField}` } } + }; + + return this.group({ + _id: dateFormat[interval], + count: { $sum: 1 }, + firstDocument: { $min: `$${dateField}` }, + lastDocument: { $max: `$${dateField}` } + }).sort({ _id: 1 }); + } +} diff --git a/libs/mongodb-client/src/client.ts b/libs/mongodb-client/src/client.ts index ebc867d..88f5f66 100644 --- a/libs/mongodb-client/src/client.ts +++ b/libs/mongodb-client/src/client.ts @@ -1,379 +1,379 @@ -import { MongoClient, Db, Collection, MongoClientOptions, Document, WithId, OptionalUnlessRequiredId } from 'mongodb'; -import { mongodbConfig } from '@stock-bot/config'; -import { getLogger } from '@stock-bot/logger'; -import type { - MongoDBClientConfig, - MongoDBConnectionOptions, - CollectionNames, - DocumentBase, - SentimentData, - RawDocument, - NewsArticle, - SecFiling, - EarningsTranscript, - AnalystReport -} from './types'; -import { MongoDBHealthMonitor } from './health'; -import { schemaMap } from './schemas'; -import * as yup from 'yup'; - -/** - * MongoDB Client for Stock Bot - * - * Provides type-safe access to MongoDB collections with built-in - * 
health monitoring, connection pooling, and schema validation. - */ -export class MongoDBClient { - private client: MongoClient | null = null; - private db: Db | null = null; - private readonly config: MongoDBClientConfig; - private readonly options: MongoDBConnectionOptions; - private readonly logger: ReturnType; - private readonly healthMonitor: MongoDBHealthMonitor; - private isConnected = false; - - constructor( - config?: Partial, - options?: MongoDBConnectionOptions - ) { - this.config = this.buildConfig(config); - this.options = { - retryAttempts: 3, - retryDelay: 1000, - healthCheckInterval: 30000, - ...options - }; - - this.logger = getLogger('mongodb-client'); - this.healthMonitor = new MongoDBHealthMonitor(this); - } - - /** - * Connect to MongoDB - */ - async connect(): Promise { - if (this.isConnected && this.client) { - return; - } - - const uri = this.buildConnectionUri(); - const clientOptions = this.buildClientOptions(); - - let lastError: Error | null = null; - - for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) { - try { - this.logger.info(`Connecting to MongoDB (attempt ${attempt}/${this.options.retryAttempts})...`); - - this.client = new MongoClient(uri, clientOptions); - await this.client.connect(); - - // Test the connection - await this.client.db(this.config.database).admin().ping(); - - this.db = this.client.db(this.config.database); - this.isConnected = true; - - this.logger.info('Successfully connected to MongoDB'); - - // Start health monitoring - this.healthMonitor.start(); - - return; - } catch (error) { - lastError = error as Error; - this.logger.error(`MongoDB connection attempt ${attempt} failed:`, error); - - if (this.client) { - await this.client.close(); - this.client = null; - } - - if (attempt < this.options.retryAttempts!) { - await this.delay(this.options.retryDelay! 
* attempt); - } - } - } - - throw new Error(`Failed to connect to MongoDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`); - } - - /** - * Disconnect from MongoDB - */ - async disconnect(): Promise { - if (!this.client) { - return; - } - - try { - this.healthMonitor.stop(); - await this.client.close(); - this.isConnected = false; - this.client = null; - this.db = null; - this.logger.info('Disconnected from MongoDB'); - } catch (error) { - this.logger.error('Error disconnecting from MongoDB:', error); - throw error; - } - } - - /** - * Get a typed collection - */ - getCollection(name: CollectionNames): Collection { - if (!this.db) { - throw new Error('MongoDB client not connected'); - } - return this.db.collection(name); - } - - /** - * Insert a document with validation - */ - async insertOne( - collectionName: CollectionNames, - document: Omit & Partial> - ): Promise { - const collection = this.getCollection(collectionName); - - // Add timestamps - const now = new Date(); - const docWithTimestamps = { - ...document, - created_at: document.created_at || now, - updated_at: now - } as T; // Validate document if schema exists - if (collectionName in schemaMap) { - try { - (schemaMap as any)[collectionName].validateSync(docWithTimestamps); - } catch (error) { - if (error instanceof yup.ValidationError) { - this.logger.error(`Document validation failed for ${collectionName}:`, error.errors); - throw new Error(`Document validation failed: ${error.errors?.map(e => e).join(', ')}`); - } - throw error; - } - }const result = await collection.insertOne(docWithTimestamps as OptionalUnlessRequiredId); - return { ...docWithTimestamps, _id: result.insertedId } as T; - } - - /** - * Update a document with validation - */ - async updateOne( - collectionName: CollectionNames, - filter: any, - update: Partial - ): Promise { - const collection = this.getCollection(collectionName); - - // Add updated timestamp - const updateWithTimestamp = { - ...update, - 
updated_at: new Date() - }; - - const result = await collection.updateOne(filter, { $set: updateWithTimestamp }); - return result.modifiedCount > 0; - } - /** - * Find documents with optional validation - */ - async find( - collectionName: CollectionNames, - filter: any = {}, - options: any = {} - ): Promise { - const collection = this.getCollection(collectionName); - return await collection.find(filter, options).toArray() as T[]; - } - - /** - * Find one document - */ - async findOne( - collectionName: CollectionNames, - filter: any - ): Promise { - const collection = this.getCollection(collectionName); - return await collection.findOne(filter) as T | null; - } - - /** - * Aggregate with type safety - */ - async aggregate( - collectionName: CollectionNames, - pipeline: any[] - ): Promise { - const collection = this.getCollection(collectionName); - return await collection.aggregate(pipeline).toArray(); - } - - /** - * Count documents - */ - async countDocuments( - collectionName: CollectionNames, - filter: any = {} - ): Promise { - const collection = this.getCollection(collectionName); - return await collection.countDocuments(filter); - } - - /** - * Create indexes for better performance - */ - async createIndexes(): Promise { - if (!this.db) { - throw new Error('MongoDB client not connected'); - } - - try { - // Sentiment data indexes - await this.db.collection('sentiment_data').createIndexes([ - { key: { symbol: 1, timestamp: -1 } }, - { key: { sentiment_label: 1 } }, - { key: { source_type: 1 } }, - { key: { created_at: -1 } } - ]); - - // News articles indexes - await this.db.collection('news_articles').createIndexes([ - { key: { symbols: 1, published_date: -1 } }, - { key: { publication: 1 } }, - { key: { categories: 1 } }, - { key: { created_at: -1 } } - ]); - - // SEC filings indexes - await this.db.collection('sec_filings').createIndexes([ - { key: { symbols: 1, filing_date: -1 } }, - { key: { filing_type: 1 } }, - { key: { cik: 1 } }, - { key: { 
created_at: -1 } } - ]); // Raw documents indexes - await this.db.collection('raw_documents').createIndex( - { content_hash: 1 }, - { unique: true } - ); - await this.db.collection('raw_documents').createIndexes([ - { key: { processing_status: 1 } }, - { key: { document_type: 1 } }, - { key: { created_at: -1 } } - ]); - - this.logger.info('MongoDB indexes created successfully'); - } catch (error) { - this.logger.error('Error creating MongoDB indexes:', error); - throw error; - } - } - - /** - * Get database statistics - */ - async getStats(): Promise { - if (!this.db) { - throw new Error('MongoDB client not connected'); - } - return await this.db.stats(); - } - - /** - * Check if client is connected - */ - get connected(): boolean { - return this.isConnected && !!this.client; - } - - /** - * Get the underlying MongoDB client - */ - get mongoClient(): MongoClient | null { - return this.client; - } - - /** - * Get the database instance - */ - get database(): Db | null { - return this.db; - } - - private buildConfig(config?: Partial): MongoDBClientConfig { - return { - host: config?.host || mongodbConfig.MONGODB_HOST, - port: config?.port || mongodbConfig.MONGODB_PORT, - database: config?.database || mongodbConfig.MONGODB_DATABASE, - username: config?.username || mongodbConfig.MONGODB_USERNAME, - password: config?.password || mongodbConfig.MONGODB_PASSWORD, - authSource: config?.authSource || mongodbConfig.MONGODB_AUTH_SOURCE, - uri: config?.uri || mongodbConfig.MONGODB_URI, - poolSettings: { - maxPoolSize: mongodbConfig.MONGODB_MAX_POOL_SIZE, - minPoolSize: mongodbConfig.MONGODB_MIN_POOL_SIZE, - maxIdleTime: mongodbConfig.MONGODB_MAX_IDLE_TIME, - ...config?.poolSettings - }, - timeouts: { - connectTimeout: mongodbConfig.MONGODB_CONNECT_TIMEOUT, - socketTimeout: mongodbConfig.MONGODB_SOCKET_TIMEOUT, - serverSelectionTimeout: mongodbConfig.MONGODB_SERVER_SELECTION_TIMEOUT, - ...config?.timeouts - }, - tls: { - enabled: mongodbConfig.MONGODB_TLS, - insecure: 
mongodbConfig.MONGODB_TLS_INSECURE, - caFile: mongodbConfig.MONGODB_TLS_CA_FILE, - ...config?.tls - }, - options: { - retryWrites: mongodbConfig.MONGODB_RETRY_WRITES, - journal: mongodbConfig.MONGODB_JOURNAL, - readPreference: mongodbConfig.MONGODB_READ_PREFERENCE as any, - writeConcern: mongodbConfig.MONGODB_WRITE_CONCERN, - ...config?.options - } - }; - } - - private buildConnectionUri(): string { - if (this.config.uri) { - return this.config.uri; - } - - const { host, port, username, password, database, authSource } = this.config; - const auth = username && password ? `${username}:${password}@` : ''; - const authDb = authSource ? `?authSource=${authSource}` : ''; - - return `mongodb://${auth}${host}:${port}/${database}${authDb}`; - } - - private buildClientOptions(): MongoClientOptions { - return { - maxPoolSize: this.config.poolSettings?.maxPoolSize, - minPoolSize: this.config.poolSettings?.minPoolSize, - maxIdleTimeMS: this.config.poolSettings?.maxIdleTime, - connectTimeoutMS: this.config.timeouts?.connectTimeout, - socketTimeoutMS: this.config.timeouts?.socketTimeout, - serverSelectionTimeoutMS: this.config.timeouts?.serverSelectionTimeout, - retryWrites: this.config.options?.retryWrites, - journal: this.config.options?.journal, - readPreference: this.config.options?.readPreference, writeConcern: this.config.options?.writeConcern ? { - w: this.config.options.writeConcern === 'majority' - ? 
'majority' as const - : parseInt(this.config.options.writeConcern, 10) || 1 - } : undefined, - tls: this.config.tls?.enabled, - tlsInsecure: this.config.tls?.insecure, - tlsCAFile: this.config.tls?.caFile - }; - } - - private delay(ms: number): Promise { - return new Promise(resolve => setTimeout(resolve, ms)); - } -} +import { MongoClient, Db, Collection, MongoClientOptions, Document, WithId, OptionalUnlessRequiredId } from 'mongodb'; +import { mongodbConfig } from '@stock-bot/config'; +import { getLogger } from '@stock-bot/logger'; +import type { + MongoDBClientConfig, + MongoDBConnectionOptions, + CollectionNames, + DocumentBase, + SentimentData, + RawDocument, + NewsArticle, + SecFiling, + EarningsTranscript, + AnalystReport +} from './types'; +import { MongoDBHealthMonitor } from './health'; +import { schemaMap } from './schemas'; +import * as yup from 'yup'; + +/** + * MongoDB Client for Stock Bot + * + * Provides type-safe access to MongoDB collections with built-in + * health monitoring, connection pooling, and schema validation. 
+ */ +export class MongoDBClient { + private client: MongoClient | null = null; + private db: Db | null = null; + private readonly config: MongoDBClientConfig; + private readonly options: MongoDBConnectionOptions; + private readonly logger: ReturnType; + private readonly healthMonitor: MongoDBHealthMonitor; + private isConnected = false; + + constructor( + config?: Partial, + options?: MongoDBConnectionOptions + ) { + this.config = this.buildConfig(config); + this.options = { + retryAttempts: 3, + retryDelay: 1000, + healthCheckInterval: 30000, + ...options + }; + + this.logger = getLogger('mongodb-client'); + this.healthMonitor = new MongoDBHealthMonitor(this); + } + + /** + * Connect to MongoDB + */ + async connect(): Promise { + if (this.isConnected && this.client) { + return; + } + + const uri = this.buildConnectionUri(); + const clientOptions = this.buildClientOptions(); + + let lastError: Error | null = null; + + for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) { + try { + this.logger.info(`Connecting to MongoDB (attempt ${attempt}/${this.options.retryAttempts})...`); + + this.client = new MongoClient(uri, clientOptions); + await this.client.connect(); + + // Test the connection + await this.client.db(this.config.database).admin().ping(); + + this.db = this.client.db(this.config.database); + this.isConnected = true; + + this.logger.info('Successfully connected to MongoDB'); + + // Start health monitoring + this.healthMonitor.start(); + + return; + } catch (error) { + lastError = error as Error; + this.logger.error(`MongoDB connection attempt ${attempt} failed:`, error); + + if (this.client) { + await this.client.close(); + this.client = null; + } + + if (attempt < this.options.retryAttempts!) { + await this.delay(this.options.retryDelay! 
* attempt); + } + } + } + + throw new Error(`Failed to connect to MongoDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`); + } + + /** + * Disconnect from MongoDB + */ + async disconnect(): Promise { + if (!this.client) { + return; + } + + try { + this.healthMonitor.stop(); + await this.client.close(); + this.isConnected = false; + this.client = null; + this.db = null; + this.logger.info('Disconnected from MongoDB'); + } catch (error) { + this.logger.error('Error disconnecting from MongoDB:', error); + throw error; + } + } + + /** + * Get a typed collection + */ + getCollection(name: CollectionNames): Collection { + if (!this.db) { + throw new Error('MongoDB client not connected'); + } + return this.db.collection(name); + } + + /** + * Insert a document with validation + */ + async insertOne( + collectionName: CollectionNames, + document: Omit & Partial> + ): Promise { + const collection = this.getCollection(collectionName); + + // Add timestamps + const now = new Date(); + const docWithTimestamps = { + ...document, + created_at: document.created_at || now, + updated_at: now + } as T; // Validate document if schema exists + if (collectionName in schemaMap) { + try { + (schemaMap as any)[collectionName].validateSync(docWithTimestamps); + } catch (error) { + if (error instanceof yup.ValidationError) { + this.logger.error(`Document validation failed for ${collectionName}:`, error.errors); + throw new Error(`Document validation failed: ${error.errors?.map(e => e).join(', ')}`); + } + throw error; + } + }const result = await collection.insertOne(docWithTimestamps as OptionalUnlessRequiredId); + return { ...docWithTimestamps, _id: result.insertedId } as T; + } + + /** + * Update a document with validation + */ + async updateOne( + collectionName: CollectionNames, + filter: any, + update: Partial + ): Promise { + const collection = this.getCollection(collectionName); + + // Add updated timestamp + const updateWithTimestamp = { + ...update, + 
updated_at: new Date() + }; + + const result = await collection.updateOne(filter, { $set: updateWithTimestamp }); + return result.modifiedCount > 0; + } + /** + * Find documents with optional validation + */ + async find( + collectionName: CollectionNames, + filter: any = {}, + options: any = {} + ): Promise { + const collection = this.getCollection(collectionName); + return await collection.find(filter, options).toArray() as T[]; + } + + /** + * Find one document + */ + async findOne( + collectionName: CollectionNames, + filter: any + ): Promise { + const collection = this.getCollection(collectionName); + return await collection.findOne(filter) as T | null; + } + + /** + * Aggregate with type safety + */ + async aggregate( + collectionName: CollectionNames, + pipeline: any[] + ): Promise { + const collection = this.getCollection(collectionName); + return await collection.aggregate(pipeline).toArray(); + } + + /** + * Count documents + */ + async countDocuments( + collectionName: CollectionNames, + filter: any = {} + ): Promise { + const collection = this.getCollection(collectionName); + return await collection.countDocuments(filter); + } + + /** + * Create indexes for better performance + */ + async createIndexes(): Promise { + if (!this.db) { + throw new Error('MongoDB client not connected'); + } + + try { + // Sentiment data indexes + await this.db.collection('sentiment_data').createIndexes([ + { key: { symbol: 1, timestamp: -1 } }, + { key: { sentiment_label: 1 } }, + { key: { source_type: 1 } }, + { key: { created_at: -1 } } + ]); + + // News articles indexes + await this.db.collection('news_articles').createIndexes([ + { key: { symbols: 1, published_date: -1 } }, + { key: { publication: 1 } }, + { key: { categories: 1 } }, + { key: { created_at: -1 } } + ]); + + // SEC filings indexes + await this.db.collection('sec_filings').createIndexes([ + { key: { symbols: 1, filing_date: -1 } }, + { key: { filing_type: 1 } }, + { key: { cik: 1 } }, + { key: { 
created_at: -1 } } + ]); // Raw documents indexes + await this.db.collection('raw_documents').createIndex( + { content_hash: 1 }, + { unique: true } + ); + await this.db.collection('raw_documents').createIndexes([ + { key: { processing_status: 1 } }, + { key: { document_type: 1 } }, + { key: { created_at: -1 } } + ]); + + this.logger.info('MongoDB indexes created successfully'); + } catch (error) { + this.logger.error('Error creating MongoDB indexes:', error); + throw error; + } + } + + /** + * Get database statistics + */ + async getStats(): Promise { + if (!this.db) { + throw new Error('MongoDB client not connected'); + } + return await this.db.stats(); + } + + /** + * Check if client is connected + */ + get connected(): boolean { + return this.isConnected && !!this.client; + } + + /** + * Get the underlying MongoDB client + */ + get mongoClient(): MongoClient | null { + return this.client; + } + + /** + * Get the database instance + */ + get database(): Db | null { + return this.db; + } + + private buildConfig(config?: Partial): MongoDBClientConfig { + return { + host: config?.host || mongodbConfig.MONGODB_HOST, + port: config?.port || mongodbConfig.MONGODB_PORT, + database: config?.database || mongodbConfig.MONGODB_DATABASE, + username: config?.username || mongodbConfig.MONGODB_USERNAME, + password: config?.password || mongodbConfig.MONGODB_PASSWORD, + authSource: config?.authSource || mongodbConfig.MONGODB_AUTH_SOURCE, + uri: config?.uri || mongodbConfig.MONGODB_URI, + poolSettings: { + maxPoolSize: mongodbConfig.MONGODB_MAX_POOL_SIZE, + minPoolSize: mongodbConfig.MONGODB_MIN_POOL_SIZE, + maxIdleTime: mongodbConfig.MONGODB_MAX_IDLE_TIME, + ...config?.poolSettings + }, + timeouts: { + connectTimeout: mongodbConfig.MONGODB_CONNECT_TIMEOUT, + socketTimeout: mongodbConfig.MONGODB_SOCKET_TIMEOUT, + serverSelectionTimeout: mongodbConfig.MONGODB_SERVER_SELECTION_TIMEOUT, + ...config?.timeouts + }, + tls: { + enabled: mongodbConfig.MONGODB_TLS, + insecure: 
mongodbConfig.MONGODB_TLS_INSECURE, + caFile: mongodbConfig.MONGODB_TLS_CA_FILE, + ...config?.tls + }, + options: { + retryWrites: mongodbConfig.MONGODB_RETRY_WRITES, + journal: mongodbConfig.MONGODB_JOURNAL, + readPreference: mongodbConfig.MONGODB_READ_PREFERENCE as any, + writeConcern: mongodbConfig.MONGODB_WRITE_CONCERN, + ...config?.options + } + }; + } + + private buildConnectionUri(): string { + if (this.config.uri) { + return this.config.uri; + } + + const { host, port, username, password, database, authSource } = this.config; + const auth = username && password ? `${username}:${password}@` : ''; + const authDb = authSource ? `?authSource=${authSource}` : ''; + + return `mongodb://${auth}${host}:${port}/${database}${authDb}`; + } + + private buildClientOptions(): MongoClientOptions { + return { + maxPoolSize: this.config.poolSettings?.maxPoolSize, + minPoolSize: this.config.poolSettings?.minPoolSize, + maxIdleTimeMS: this.config.poolSettings?.maxIdleTime, + connectTimeoutMS: this.config.timeouts?.connectTimeout, + socketTimeoutMS: this.config.timeouts?.socketTimeout, + serverSelectionTimeoutMS: this.config.timeouts?.serverSelectionTimeout, + retryWrites: this.config.options?.retryWrites, + journal: this.config.options?.journal, + readPreference: this.config.options?.readPreference, writeConcern: this.config.options?.writeConcern ? { + w: this.config.options.writeConcern === 'majority' + ? 
'majority' as const + : parseInt(this.config.options.writeConcern, 10) || 1 + } : undefined, + tls: this.config.tls?.enabled, + tlsInsecure: this.config.tls?.insecure, + tlsCAFile: this.config.tls?.caFile + }; + } + + private delay(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)); + } +} diff --git a/libs/mongodb-client/src/factory.ts b/libs/mongodb-client/src/factory.ts index d00c3fc..2d50982 100644 --- a/libs/mongodb-client/src/factory.ts +++ b/libs/mongodb-client/src/factory.ts @@ -1,66 +1,66 @@ -import { MongoDBClient } from './client'; -import { mongodbConfig } from '@stock-bot/config'; -import type { MongoDBClientConfig, MongoDBConnectionOptions } from './types'; - -/** - * Factory function to create a MongoDB client instance - */ -export function createMongoDBClient( - config?: Partial, - options?: MongoDBConnectionOptions -): MongoDBClient { - return new MongoDBClient(config, options); -} - -/** - * Create a MongoDB client with default configuration - */ -export function createDefaultMongoDBClient(): MongoDBClient { - const config: Partial = { - host: mongodbConfig.MONGODB_HOST, - port: mongodbConfig.MONGODB_PORT, - database: mongodbConfig.MONGODB_DATABASE, - username: mongodbConfig.MONGODB_USERNAME, - password: mongodbConfig.MONGODB_PASSWORD, - uri: mongodbConfig.MONGODB_URI - }; - - return new MongoDBClient(config); -} - -/** - * Singleton MongoDB client instance - */ -let defaultClient: MongoDBClient | null = null; - -/** - * Get or create the default MongoDB client instance - */ -export function getMongoDBClient(): MongoDBClient { - if (!defaultClient) { - defaultClient = createDefaultMongoDBClient(); - } - return defaultClient; -} - -/** - * Connect to MongoDB using the default client - */ -export async function connectMongoDB(): Promise { - const client = getMongoDBClient(); - if (!client.connected) { - await client.connect(); - await client.createIndexes(); - } - return client; -} - -/** - * Disconnect from MongoDB - */ 
-export async function disconnectMongoDB(): Promise { - if (defaultClient) { - await defaultClient.disconnect(); - defaultClient = null; - } -} +import { MongoDBClient } from './client'; +import { mongodbConfig } from '@stock-bot/config'; +import type { MongoDBClientConfig, MongoDBConnectionOptions } from './types'; + +/** + * Factory function to create a MongoDB client instance + */ +export function createMongoDBClient( + config?: Partial, + options?: MongoDBConnectionOptions +): MongoDBClient { + return new MongoDBClient(config, options); +} + +/** + * Create a MongoDB client with default configuration + */ +export function createDefaultMongoDBClient(): MongoDBClient { + const config: Partial = { + host: mongodbConfig.MONGODB_HOST, + port: mongodbConfig.MONGODB_PORT, + database: mongodbConfig.MONGODB_DATABASE, + username: mongodbConfig.MONGODB_USERNAME, + password: mongodbConfig.MONGODB_PASSWORD, + uri: mongodbConfig.MONGODB_URI + }; + + return new MongoDBClient(config); +} + +/** + * Singleton MongoDB client instance + */ +let defaultClient: MongoDBClient | null = null; + +/** + * Get or create the default MongoDB client instance + */ +export function getMongoDBClient(): MongoDBClient { + if (!defaultClient) { + defaultClient = createDefaultMongoDBClient(); + } + return defaultClient; +} + +/** + * Connect to MongoDB using the default client + */ +export async function connectMongoDB(): Promise { + const client = getMongoDBClient(); + if (!client.connected) { + await client.connect(); + await client.createIndexes(); + } + return client; +} + +/** + * Disconnect from MongoDB + */ +export async function disconnectMongoDB(): Promise { + if (defaultClient) { + await defaultClient.disconnect(); + defaultClient = null; + } +} diff --git a/libs/mongodb-client/src/health.ts b/libs/mongodb-client/src/health.ts index e787bc7..fc17005 100644 --- a/libs/mongodb-client/src/health.ts +++ b/libs/mongodb-client/src/health.ts @@ -1,226 +1,226 @@ -import { getLogger } from 
'@stock-bot/logger'; -import type { MongoDBClient } from './client'; -import type { MongoDBHealthCheck, MongoDBHealthStatus, MongoDBMetrics } from './types'; - -/** - * MongoDB Health Monitor - * - * Monitors MongoDB connection health and provides metrics - */ -export class MongoDBHealthMonitor { - private readonly client: MongoDBClient; - private readonly logger: ReturnType; - private healthCheckInterval: NodeJS.Timeout | null = null; - private metrics: MongoDBMetrics; - private lastHealthCheck: MongoDBHealthCheck | null = null; - - constructor(client: MongoDBClient) { - this.client = client; - this.logger = getLogger('mongodb-health-monitor'); - this.metrics = { - operationsPerSecond: 0, - averageLatency: 0, - errorRate: 0, - connectionPoolUtilization: 0, - documentsProcessed: 0 - }; - } - - /** - * Start health monitoring - */ - start(intervalMs: number = 30000): void { - if (this.healthCheckInterval) { - this.stop(); - } - - this.logger.info(`Starting MongoDB health monitoring (interval: ${intervalMs}ms)`); - - this.healthCheckInterval = setInterval(async () => { - try { - await this.performHealthCheck(); - } catch (error) { - this.logger.error('Health check failed:', error); - } - }, intervalMs); - - // Perform initial health check - this.performHealthCheck().catch(error => { - this.logger.error('Initial health check failed:', error); - }); - } - - /** - * Stop health monitoring - */ - stop(): void { - if (this.healthCheckInterval) { - clearInterval(this.healthCheckInterval); - this.healthCheckInterval = null; - this.logger.info('Stopped MongoDB health monitoring'); - } - } - - /** - * Get current health status - */ - async getHealth(): Promise { - if (!this.lastHealthCheck) { - await this.performHealthCheck(); - } - return this.lastHealthCheck!; - } - - /** - * Get current metrics - */ - getMetrics(): MongoDBMetrics { - return { ...this.metrics }; - } - - /** - * Perform a health check - */ - private async performHealthCheck(): Promise { - const startTime = 
Date.now(); - const errors: string[] = []; - let status: MongoDBHealthStatus = 'healthy'; - - try { - if (!this.client.connected) { - errors.push('MongoDB client not connected'); - status = 'unhealthy'; - } else { - // Test basic connectivity - const mongoClient = this.client.mongoClient; - const db = this.client.database; - - if (!mongoClient || !db) { - errors.push('MongoDB client or database not available'); - status = 'unhealthy'; - } else { - // Ping the database - await db.admin().ping(); - - // Get server status for metrics - try { - const serverStatus = await db.admin().serverStatus(); - this.updateMetricsFromServerStatus(serverStatus); - - // Check connection pool status - const poolStats = this.getConnectionPoolStats(serverStatus); - - if (poolStats.utilization > 0.9) { - errors.push('High connection pool utilization'); - status = status === 'healthy' ? 'degraded' : status; - } - - // Check for high latency - const latency = Date.now() - startTime; - if (latency > 1000) { - errors.push(`High latency: ${latency}ms`); - status = status === 'healthy' ? 'degraded' : status; - } - - } catch (statusError) { - errors.push(`Failed to get server status: ${(statusError as Error).message}`); - status = 'degraded'; - } - } - } - } catch (error) { - errors.push(`Health check failed: ${(error as Error).message}`); - status = 'unhealthy'; - } - - const latency = Date.now() - startTime; - - // Get connection stats - const connectionStats = this.getConnectionStats(); - - this.lastHealthCheck = { - status, - timestamp: new Date(), - latency, - connections: connectionStats, - errors: errors.length > 0 ? 
errors : undefined - }; - - // Log health status changes - if (status !== 'healthy') { - this.logger.warn(`MongoDB health status: ${status}`, { errors, latency }); - } else { - this.logger.debug(`MongoDB health check passed (${latency}ms)`); - } - } - - /** - * Update metrics from MongoDB server status - */ - private updateMetricsFromServerStatus(serverStatus: any): void { - try { - const opcounters = serverStatus.opcounters || {}; - const connections = serverStatus.connections || {}; - const dur = serverStatus.dur || {}; - - // Calculate operations per second (approximate) - const totalOps = Object.values(opcounters).reduce((sum: number, count: any) => sum + (count || 0), 0); - this.metrics.operationsPerSecond = totalOps; - - // Connection pool utilization - if (connections.current && connections.available) { - const total = connections.current + connections.available; - this.metrics.connectionPoolUtilization = connections.current / total; - } - - // Average latency (from durability stats if available) - if (dur.timeMS) { - this.metrics.averageLatency = dur.timeMS.dt || 0; - } } catch (error) { - this.logger.debug('Error parsing server status for metrics:', error as any); - } - } - - /** - * Get connection pool statistics - */ - private getConnectionPoolStats(serverStatus: any): { utilization: number; active: number; available: number } { - const connections = serverStatus.connections || {}; - const active = connections.current || 0; - const available = connections.available || 0; - const total = active + available; - - return { - utilization: total > 0 ? 
active / total : 0, - active, - available - }; - } - - /** - * Get connection statistics - */ - private getConnectionStats(): { active: number; available: number; total: number } { - // This would ideally come from the MongoDB driver's connection pool - // For now, we'll return estimated values - return { - active: 1, - available: 9, - total: 10 - }; - } - - /** - * Update error rate metric - */ - updateErrorRate(errorCount: number, totalOperations: number): void { - this.metrics.errorRate = totalOperations > 0 ? errorCount / totalOperations : 0; - } - - /** - * Update documents processed metric - */ - updateDocumentsProcessed(count: number): void { - this.metrics.documentsProcessed += count; - } -} +import { getLogger } from '@stock-bot/logger'; +import type { MongoDBClient } from './client'; +import type { MongoDBHealthCheck, MongoDBHealthStatus, MongoDBMetrics } from './types'; + +/** + * MongoDB Health Monitor + * + * Monitors MongoDB connection health and provides metrics + */ +export class MongoDBHealthMonitor { + private readonly client: MongoDBClient; + private readonly logger: ReturnType; + private healthCheckInterval: NodeJS.Timeout | null = null; + private metrics: MongoDBMetrics; + private lastHealthCheck: MongoDBHealthCheck | null = null; + + constructor(client: MongoDBClient) { + this.client = client; + this.logger = getLogger('mongodb-health-monitor'); + this.metrics = { + operationsPerSecond: 0, + averageLatency: 0, + errorRate: 0, + connectionPoolUtilization: 0, + documentsProcessed: 0 + }; + } + + /** + * Start health monitoring + */ + start(intervalMs: number = 30000): void { + if (this.healthCheckInterval) { + this.stop(); + } + + this.logger.info(`Starting MongoDB health monitoring (interval: ${intervalMs}ms)`); + + this.healthCheckInterval = setInterval(async () => { + try { + await this.performHealthCheck(); + } catch (error) { + this.logger.error('Health check failed:', error); + } + }, intervalMs); + + // Perform initial health check + 
this.performHealthCheck().catch(error => { + this.logger.error('Initial health check failed:', error); + }); + } + + /** + * Stop health monitoring + */ + stop(): void { + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval); + this.healthCheckInterval = null; + this.logger.info('Stopped MongoDB health monitoring'); + } + } + + /** + * Get current health status + */ + async getHealth(): Promise { + if (!this.lastHealthCheck) { + await this.performHealthCheck(); + } + return this.lastHealthCheck!; + } + + /** + * Get current metrics + */ + getMetrics(): MongoDBMetrics { + return { ...this.metrics }; + } + + /** + * Perform a health check + */ + private async performHealthCheck(): Promise { + const startTime = Date.now(); + const errors: string[] = []; + let status: MongoDBHealthStatus = 'healthy'; + + try { + if (!this.client.connected) { + errors.push('MongoDB client not connected'); + status = 'unhealthy'; + } else { + // Test basic connectivity + const mongoClient = this.client.mongoClient; + const db = this.client.database; + + if (!mongoClient || !db) { + errors.push('MongoDB client or database not available'); + status = 'unhealthy'; + } else { + // Ping the database + await db.admin().ping(); + + // Get server status for metrics + try { + const serverStatus = await db.admin().serverStatus(); + this.updateMetricsFromServerStatus(serverStatus); + + // Check connection pool status + const poolStats = this.getConnectionPoolStats(serverStatus); + + if (poolStats.utilization > 0.9) { + errors.push('High connection pool utilization'); + status = status === 'healthy' ? 'degraded' : status; + } + + // Check for high latency + const latency = Date.now() - startTime; + if (latency > 1000) { + errors.push(`High latency: ${latency}ms`); + status = status === 'healthy' ? 
'degraded' : status; + } + + } catch (statusError) { + errors.push(`Failed to get server status: ${(statusError as Error).message}`); + status = 'degraded'; + } + } + } + } catch (error) { + errors.push(`Health check failed: ${(error as Error).message}`); + status = 'unhealthy'; + } + + const latency = Date.now() - startTime; + + // Get connection stats + const connectionStats = this.getConnectionStats(); + + this.lastHealthCheck = { + status, + timestamp: new Date(), + latency, + connections: connectionStats, + errors: errors.length > 0 ? errors : undefined + }; + + // Log health status changes + if (status !== 'healthy') { + this.logger.warn(`MongoDB health status: ${status}`, { errors, latency }); + } else { + this.logger.debug(`MongoDB health check passed (${latency}ms)`); + } + } + + /** + * Update metrics from MongoDB server status + */ + private updateMetricsFromServerStatus(serverStatus: any): void { + try { + const opcounters = serverStatus.opcounters || {}; + const connections = serverStatus.connections || {}; + const dur = serverStatus.dur || {}; + + // Calculate operations per second (approximate) + const totalOps = Object.values(opcounters).reduce((sum: number, count: any) => sum + (count || 0), 0); + this.metrics.operationsPerSecond = totalOps; + + // Connection pool utilization + if (connections.current && connections.available) { + const total = connections.current + connections.available; + this.metrics.connectionPoolUtilization = connections.current / total; + } + + // Average latency (from durability stats if available) + if (dur.timeMS) { + this.metrics.averageLatency = dur.timeMS.dt || 0; + } } catch (error) { + this.logger.debug('Error parsing server status for metrics:', error as any); + } + } + + /** + * Get connection pool statistics + */ + private getConnectionPoolStats(serverStatus: any): { utilization: number; active: number; available: number } { + const connections = serverStatus.connections || {}; + const active = connections.current 
|| 0; + const available = connections.available || 0; + const total = active + available; + + return { + utilization: total > 0 ? active / total : 0, + active, + available + }; + } + + /** + * Get connection statistics + */ + private getConnectionStats(): { active: number; available: number; total: number } { + // This would ideally come from the MongoDB driver's connection pool + // For now, we'll return estimated values + return { + active: 1, + available: 9, + total: 10 + }; + } + + /** + * Update error rate metric + */ + updateErrorRate(errorCount: number, totalOperations: number): void { + this.metrics.errorRate = totalOperations > 0 ? errorCount / totalOperations : 0; + } + + /** + * Update documents processed metric + */ + updateDocumentsProcessed(count: number): void { + this.metrics.documentsProcessed += count; + } +} diff --git a/libs/mongodb-client/src/index.ts b/libs/mongodb-client/src/index.ts index b71a5ef..bbfa755 100644 --- a/libs/mongodb-client/src/index.ts +++ b/libs/mongodb-client/src/index.ts @@ -1,40 +1,40 @@ -/** - * MongoDB Client Library for Stock Bot - * - * Provides type-safe MongoDB access for document storage, sentiment data, - * and raw content processing. 
- */ - -export { MongoDBClient } from './client'; -export { MongoDBHealthMonitor } from './health'; -export { MongoDBTransactionManager } from './transactions'; -export { MongoDBAggregationBuilder } from './aggregation'; - -// Types -export type { - MongoDBClientConfig, - MongoDBConnectionOptions, - MongoDBHealthStatus, - MongoDBMetrics, - CollectionNames, - DocumentBase, - SentimentData, - RawDocument, - NewsArticle, - SecFiling, - EarningsTranscript, - AnalystReport -} from './types'; - -// Schemas -export { - sentimentDataSchema, - rawDocumentSchema, - newsArticleSchema, - secFilingSchema, - earningsTranscriptSchema, - analystReportSchema -} from './schemas'; - -// Utils -export { createMongoDBClient } from './factory'; +/** + * MongoDB Client Library for Stock Bot + * + * Provides type-safe MongoDB access for document storage, sentiment data, + * and raw content processing. + */ + +export { MongoDBClient } from './client'; +export { MongoDBHealthMonitor } from './health'; +export { MongoDBTransactionManager } from './transactions'; +export { MongoDBAggregationBuilder } from './aggregation'; + +// Types +export type { + MongoDBClientConfig, + MongoDBConnectionOptions, + MongoDBHealthStatus, + MongoDBMetrics, + CollectionNames, + DocumentBase, + SentimentData, + RawDocument, + NewsArticle, + SecFiling, + EarningsTranscript, + AnalystReport +} from './types'; + +// Schemas +export { + sentimentDataSchema, + rawDocumentSchema, + newsArticleSchema, + secFilingSchema, + earningsTranscriptSchema, + analystReportSchema +} from './schemas'; + +// Utils +export { createMongoDBClient } from './factory'; diff --git a/libs/mongodb-client/src/schemas.ts b/libs/mongodb-client/src/schemas.ts index 4a90348..ce28c1b 100644 --- a/libs/mongodb-client/src/schemas.ts +++ b/libs/mongodb-client/src/schemas.ts @@ -1,132 +1,132 @@ -import * as yup from 'yup'; - -/** - * Yup Schemas for MongoDB Document Validation - */ - -// Base schema for all documents -export const documentBaseSchema 
= yup.object({ - _id: yup.mixed().optional(), - created_at: yup.date().required(), - updated_at: yup.date().required(), - source: yup.string().required(), - metadata: yup.object().optional(), -}); - -// Sentiment Data Schema -export const sentimentDataSchema = documentBaseSchema.shape({ - symbol: yup.string().min(1).max(10).required(), - sentiment_score: yup.number().min(-1).max(1).required(), - sentiment_label: yup.string().oneOf(['positive', 'negative', 'neutral']).required(), - confidence: yup.number().min(0).max(1).required(), - text: yup.string().min(1).required(), - source_type: yup.string().oneOf(['reddit', 'twitter', 'news', 'forums']).required(), - source_id: yup.string().required(), - timestamp: yup.date().required(), - processed_at: yup.date().required(), - language: yup.string().default('en'), - keywords: yup.array(yup.string()).required(), - entities: yup.array(yup.object({ - name: yup.string().required(), - type: yup.string().required(), - confidence: yup.number().min(0).max(1).required(), - })).required(), -}); - -// Raw Document Schema -export const rawDocumentSchema = documentBaseSchema.shape({ - document_type: yup.string().oneOf(['html', 'pdf', 'text', 'json', 'xml']).required(), - content: yup.string().required(), - content_hash: yup.string().required(), - url: yup.string().url().optional(), - title: yup.string().optional(), - author: yup.string().optional(), - published_date: yup.date().optional(), - extracted_text: yup.string().optional(), - processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(), - size_bytes: yup.number().positive().required(), - language: yup.string().optional(), -}); - -// News Article Schema -export const newsArticleSchema = documentBaseSchema.shape({ - headline: yup.string().min(1).required(), - content: yup.string().min(1).required(), - summary: yup.string().optional(), - author: yup.string().required(), - publication: yup.string().required(), - published_date: yup.date().required(), - url: 
yup.string().url().required(), - symbols: yup.array(yup.string()).required(), - categories: yup.array(yup.string()).required(), - sentiment_score: yup.number().min(-1).max(1).optional(), - relevance_score: yup.number().min(0).max(1).optional(), - image_url: yup.string().url().optional(), - tags: yup.array(yup.string()).required(), -}); - -// SEC Filing Schema -export const secFilingSchema = documentBaseSchema.shape({ - cik: yup.string().required(), - accession_number: yup.string().required(), - filing_type: yup.string().required(), - company_name: yup.string().required(), - symbols: yup.array(yup.string()).required(), - filing_date: yup.date().required(), - period_end_date: yup.date().required(), - url: yup.string().url().required(), - content: yup.string().required(), - extracted_data: yup.object().optional(), - financial_statements: yup.array(yup.object({ - statement_type: yup.string().required(), - data: yup.object().required(), - })).optional(), - processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(), -}); - -// Earnings Transcript Schema -export const earningsTranscriptSchema = documentBaseSchema.shape({ - symbol: yup.string().min(1).max(10).required(), - company_name: yup.string().required(), - quarter: yup.string().required(), - year: yup.number().min(2000).max(3000).required(), - call_date: yup.date().required(), - transcript: yup.string().required(), - participants: yup.array(yup.object({ - name: yup.string().required(), - title: yup.string().required(), - type: yup.string().oneOf(['executive', 'analyst']).required(), - })).required(), - key_topics: yup.array(yup.string()).required(), - sentiment_analysis: yup.object({ - overall_sentiment: yup.number().min(-1).max(1).required(), - topic_sentiments: yup.object().required(), - }).optional(), - financial_highlights: yup.object().optional(), -}); - -// Analyst Report Schema -export const analystReportSchema = documentBaseSchema.shape({ - symbol: 
yup.string().min(1).max(10).required(), - analyst_firm: yup.string().required(), - analyst_name: yup.string().required(), - report_title: yup.string().required(), - report_date: yup.date().required(), - rating: yup.string().oneOf(['buy', 'hold', 'sell', 'strong_buy', 'strong_sell']).required(), - price_target: yup.number().positive().optional(), - previous_rating: yup.string().optional(), - content: yup.string().required(), - summary: yup.string().required(), - key_points: yup.array(yup.string()).required(), - financial_projections: yup.object().optional(), -}); - -// Schema mapping for collections -export const schemaMap = { - sentiment_data: sentimentDataSchema, - raw_documents: rawDocumentSchema, - news_articles: newsArticleSchema, - sec_filings: secFilingSchema, - earnings_transcripts: earningsTranscriptSchema, - analyst_reports: analystReportSchema, -} as const; +import * as yup from 'yup'; + +/** + * Yup Schemas for MongoDB Document Validation + */ + +// Base schema for all documents +export const documentBaseSchema = yup.object({ + _id: yup.mixed().optional(), + created_at: yup.date().required(), + updated_at: yup.date().required(), + source: yup.string().required(), + metadata: yup.object().optional(), +}); + +// Sentiment Data Schema +export const sentimentDataSchema = documentBaseSchema.shape({ + symbol: yup.string().min(1).max(10).required(), + sentiment_score: yup.number().min(-1).max(1).required(), + sentiment_label: yup.string().oneOf(['positive', 'negative', 'neutral']).required(), + confidence: yup.number().min(0).max(1).required(), + text: yup.string().min(1).required(), + source_type: yup.string().oneOf(['reddit', 'twitter', 'news', 'forums']).required(), + source_id: yup.string().required(), + timestamp: yup.date().required(), + processed_at: yup.date().required(), + language: yup.string().default('en'), + keywords: yup.array(yup.string()).required(), + entities: yup.array(yup.object({ + name: yup.string().required(), + type: 
yup.string().required(), + confidence: yup.number().min(0).max(1).required(), + })).required(), +}); + +// Raw Document Schema +export const rawDocumentSchema = documentBaseSchema.shape({ + document_type: yup.string().oneOf(['html', 'pdf', 'text', 'json', 'xml']).required(), + content: yup.string().required(), + content_hash: yup.string().required(), + url: yup.string().url().optional(), + title: yup.string().optional(), + author: yup.string().optional(), + published_date: yup.date().optional(), + extracted_text: yup.string().optional(), + processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(), + size_bytes: yup.number().positive().required(), + language: yup.string().optional(), +}); + +// News Article Schema +export const newsArticleSchema = documentBaseSchema.shape({ + headline: yup.string().min(1).required(), + content: yup.string().min(1).required(), + summary: yup.string().optional(), + author: yup.string().required(), + publication: yup.string().required(), + published_date: yup.date().required(), + url: yup.string().url().required(), + symbols: yup.array(yup.string()).required(), + categories: yup.array(yup.string()).required(), + sentiment_score: yup.number().min(-1).max(1).optional(), + relevance_score: yup.number().min(0).max(1).optional(), + image_url: yup.string().url().optional(), + tags: yup.array(yup.string()).required(), +}); + +// SEC Filing Schema +export const secFilingSchema = documentBaseSchema.shape({ + cik: yup.string().required(), + accession_number: yup.string().required(), + filing_type: yup.string().required(), + company_name: yup.string().required(), + symbols: yup.array(yup.string()).required(), + filing_date: yup.date().required(), + period_end_date: yup.date().required(), + url: yup.string().url().required(), + content: yup.string().required(), + extracted_data: yup.object().optional(), + financial_statements: yup.array(yup.object({ + statement_type: yup.string().required(), + data: 
yup.object().required(), + })).optional(), + processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(), +}); + +// Earnings Transcript Schema +export const earningsTranscriptSchema = documentBaseSchema.shape({ + symbol: yup.string().min(1).max(10).required(), + company_name: yup.string().required(), + quarter: yup.string().required(), + year: yup.number().min(2000).max(3000).required(), + call_date: yup.date().required(), + transcript: yup.string().required(), + participants: yup.array(yup.object({ + name: yup.string().required(), + title: yup.string().required(), + type: yup.string().oneOf(['executive', 'analyst']).required(), + })).required(), + key_topics: yup.array(yup.string()).required(), + sentiment_analysis: yup.object({ + overall_sentiment: yup.number().min(-1).max(1).required(), + topic_sentiments: yup.object().required(), + }).optional(), + financial_highlights: yup.object().optional(), +}); + +// Analyst Report Schema +export const analystReportSchema = documentBaseSchema.shape({ + symbol: yup.string().min(1).max(10).required(), + analyst_firm: yup.string().required(), + analyst_name: yup.string().required(), + report_title: yup.string().required(), + report_date: yup.date().required(), + rating: yup.string().oneOf(['buy', 'hold', 'sell', 'strong_buy', 'strong_sell']).required(), + price_target: yup.number().positive().optional(), + previous_rating: yup.string().optional(), + content: yup.string().required(), + summary: yup.string().required(), + key_points: yup.array(yup.string()).required(), + financial_projections: yup.object().optional(), +}); + +// Schema mapping for collections +export const schemaMap = { + sentiment_data: sentimentDataSchema, + raw_documents: rawDocumentSchema, + news_articles: newsArticleSchema, + sec_filings: secFilingSchema, + earnings_transcripts: earningsTranscriptSchema, + analyst_reports: analystReportSchema, +} as const; diff --git a/libs/mongodb-client/src/transactions.ts 
b/libs/mongodb-client/src/transactions.ts index 1d9966b..7c4abac 100644 --- a/libs/mongodb-client/src/transactions.ts +++ b/libs/mongodb-client/src/transactions.ts @@ -1,238 +1,238 @@ -import { getLogger } from '@stock-bot/logger'; -import type { MongoDBClient } from './client'; -import type { CollectionNames, DocumentBase } from './types'; -import type { WithId, OptionalUnlessRequiredId } from 'mongodb'; - -/** - * MongoDB Transaction Manager - * - * Provides transaction support for multi-document operations - */ -export class MongoDBTransactionManager { - private readonly client: MongoDBClient; - private readonly logger: ReturnType; - - constructor(client: MongoDBClient) { - this.client = client; - this.logger = getLogger('mongodb-transaction-manager'); - } - - /** - * Execute operations within a transaction - */ - async withTransaction( - operations: (session: any) => Promise, - options?: { - readPreference?: string; - readConcern?: string; - writeConcern?: any; - maxCommitTimeMS?: number; - } - ): Promise { - const mongoClient = this.client.mongoClient; - if (!mongoClient) { - throw new Error('MongoDB client not connected'); - } - - const session = mongoClient.startSession(); - - try { - this.logger.debug('Starting MongoDB transaction'); - - const result = await session.withTransaction( - async () => { - return await operations(session); - }, { - readPreference: options?.readPreference as any, - readConcern: { level: options?.readConcern || 'majority' } as any, - writeConcern: options?.writeConcern || { w: 'majority' }, - maxCommitTimeMS: options?.maxCommitTimeMS || 10000 - } - ); - - this.logger.debug('MongoDB transaction completed successfully'); - return result; - - } catch (error) { - this.logger.error('MongoDB transaction failed:', error); - throw error; - } finally { - await session.endSession(); - } - } - - /** - * Batch insert documents across collections within a transaction - */ - async batchInsert( - operations: Array<{ - collection: CollectionNames; 
- documents: DocumentBase[]; - }>, - options?: { ordered?: boolean; bypassDocumentValidation?: boolean } - ): Promise { - await this.withTransaction(async (session) => { - for (const operation of operations) { - const collection = this.client.getCollection(operation.collection); - - // Add timestamps to all documents - const now = new Date(); - const documentsWithTimestamps = operation.documents.map(doc => ({ - ...doc, - created_at: doc.created_at || now, - updated_at: now - })); - - await collection.insertMany(documentsWithTimestamps, { - session, - ordered: options?.ordered ?? true, - bypassDocumentValidation: options?.bypassDocumentValidation ?? false - }); - - this.logger.debug(`Inserted ${documentsWithTimestamps.length} documents into ${operation.collection}`); - } - }); - } - - /** - * Batch update documents across collections within a transaction - */ - async batchUpdate( - operations: Array<{ - collection: CollectionNames; - filter: any; - update: any; - options?: any; - }> - ): Promise { - await this.withTransaction(async (session) => { - const results = []; - - for (const operation of operations) { - const collection = this.client.getCollection(operation.collection); - - // Add updated timestamp - const updateWithTimestamp = { - ...operation.update, - $set: { - ...operation.update.$set, - updated_at: new Date() - } - }; - - const result = await collection.updateMany( - operation.filter, - updateWithTimestamp, - { - session, - ...operation.options - } - ); - - results.push(result); - this.logger.debug(`Updated ${result.modifiedCount} documents in ${operation.collection}`); - } - - return results; - }); - } - - /** - * Move documents between collections within a transaction - */ - async moveDocuments( - fromCollection: CollectionNames, - toCollection: CollectionNames, - filter: any, - transform?: (doc: T) => T - ): Promise { - return await this.withTransaction(async (session) => { - const sourceCollection = this.client.getCollection(fromCollection); - const 
targetCollection = this.client.getCollection(toCollection); - - // Find documents to move - const documents = await sourceCollection.find(filter, { session }).toArray(); - - if (documents.length === 0) { - return 0; - } // Transform documents if needed - const documentsToInsert = transform - ? documents.map((doc: WithId) => transform(doc as T)) - : documents; - - // Add updated timestamp - const now = new Date(); - documentsToInsert.forEach(doc => { - doc.updated_at = now; - }); // Insert into target collection - await targetCollection.insertMany(documentsToInsert as OptionalUnlessRequiredId[], { session }); - - // Remove from source collection - const deleteResult = await sourceCollection.deleteMany(filter, { session }); - - this.logger.info(`Moved ${documents.length} documents from ${fromCollection} to ${toCollection}`); - - return deleteResult.deletedCount || 0; - }); - } - - /** - * Archive old documents within a transaction - */ - async archiveDocuments( - sourceCollection: CollectionNames, - archiveCollection: CollectionNames, - cutoffDate: Date, - batchSize: number = 1000 - ): Promise { - let totalArchived = 0; - - while (true) { - const batchArchived = await this.withTransaction(async (session) => { - const collection = this.client.getCollection(sourceCollection); - const archiveCol = this.client.getCollection(archiveCollection); - - // Find old documents - const documents = await collection.find( - { created_at: { $lt: cutoffDate } }, - { limit: batchSize, session } - ).toArray(); - - if (documents.length === 0) { - return 0; - } - - // Add archive metadata - const now = new Date(); - const documentsToArchive = documents.map(doc => ({ - ...doc, - archived_at: now, - archived_from: sourceCollection - })); - - // Insert into archive collection - await archiveCol.insertMany(documentsToArchive, { session }); - - // Remove from source collection - const ids = documents.map(doc => doc._id); - const deleteResult = await collection.deleteMany( - { _id: { $in: ids 
} }, - { session } - ); - - return deleteResult.deletedCount || 0; - }); - - totalArchived += batchArchived; - - if (batchArchived === 0) { - break; - } - - this.logger.debug(`Archived batch of ${batchArchived} documents`); - } - - this.logger.info(`Archived ${totalArchived} documents from ${sourceCollection} to ${archiveCollection}`); - return totalArchived; - } -} +import { getLogger } from '@stock-bot/logger'; +import type { MongoDBClient } from './client'; +import type { CollectionNames, DocumentBase } from './types'; +import type { WithId, OptionalUnlessRequiredId } from 'mongodb'; + +/** + * MongoDB Transaction Manager + * + * Provides transaction support for multi-document operations + */ +export class MongoDBTransactionManager { + private readonly client: MongoDBClient; + private readonly logger: ReturnType; + + constructor(client: MongoDBClient) { + this.client = client; + this.logger = getLogger('mongodb-transaction-manager'); + } + + /** + * Execute operations within a transaction + */ + async withTransaction( + operations: (session: any) => Promise, + options?: { + readPreference?: string; + readConcern?: string; + writeConcern?: any; + maxCommitTimeMS?: number; + } + ): Promise { + const mongoClient = this.client.mongoClient; + if (!mongoClient) { + throw new Error('MongoDB client not connected'); + } + + const session = mongoClient.startSession(); + + try { + this.logger.debug('Starting MongoDB transaction'); + + const result = await session.withTransaction( + async () => { + return await operations(session); + }, { + readPreference: options?.readPreference as any, + readConcern: { level: options?.readConcern || 'majority' } as any, + writeConcern: options?.writeConcern || { w: 'majority' }, + maxCommitTimeMS: options?.maxCommitTimeMS || 10000 + } + ); + + this.logger.debug('MongoDB transaction completed successfully'); + return result; + + } catch (error) { + this.logger.error('MongoDB transaction failed:', error); + throw error; + } finally { + 
await session.endSession(); + } + } + + /** + * Batch insert documents across collections within a transaction + */ + async batchInsert( + operations: Array<{ + collection: CollectionNames; + documents: DocumentBase[]; + }>, + options?: { ordered?: boolean; bypassDocumentValidation?: boolean } + ): Promise { + await this.withTransaction(async (session) => { + for (const operation of operations) { + const collection = this.client.getCollection(operation.collection); + + // Add timestamps to all documents + const now = new Date(); + const documentsWithTimestamps = operation.documents.map(doc => ({ + ...doc, + created_at: doc.created_at || now, + updated_at: now + })); + + await collection.insertMany(documentsWithTimestamps, { + session, + ordered: options?.ordered ?? true, + bypassDocumentValidation: options?.bypassDocumentValidation ?? false + }); + + this.logger.debug(`Inserted ${documentsWithTimestamps.length} documents into ${operation.collection}`); + } + }); + } + + /** + * Batch update documents across collections within a transaction + */ + async batchUpdate( + operations: Array<{ + collection: CollectionNames; + filter: any; + update: any; + options?: any; + }> + ): Promise { + await this.withTransaction(async (session) => { + const results = []; + + for (const operation of operations) { + const collection = this.client.getCollection(operation.collection); + + // Add updated timestamp + const updateWithTimestamp = { + ...operation.update, + $set: { + ...operation.update.$set, + updated_at: new Date() + } + }; + + const result = await collection.updateMany( + operation.filter, + updateWithTimestamp, + { + session, + ...operation.options + } + ); + + results.push(result); + this.logger.debug(`Updated ${result.modifiedCount} documents in ${operation.collection}`); + } + + return results; + }); + } + + /** + * Move documents between collections within a transaction + */ + async moveDocuments( + fromCollection: CollectionNames, + toCollection: CollectionNames, + 
filter: any, + transform?: (doc: T) => T + ): Promise { + return await this.withTransaction(async (session) => { + const sourceCollection = this.client.getCollection(fromCollection); + const targetCollection = this.client.getCollection(toCollection); + + // Find documents to move + const documents = await sourceCollection.find(filter, { session }).toArray(); + + if (documents.length === 0) { + return 0; + } // Transform documents if needed + const documentsToInsert = transform + ? documents.map((doc: WithId) => transform(doc as T)) + : documents; + + // Add updated timestamp + const now = new Date(); + documentsToInsert.forEach(doc => { + doc.updated_at = now; + }); // Insert into target collection + await targetCollection.insertMany(documentsToInsert as OptionalUnlessRequiredId[], { session }); + + // Remove from source collection + const deleteResult = await sourceCollection.deleteMany(filter, { session }); + + this.logger.info(`Moved ${documents.length} documents from ${fromCollection} to ${toCollection}`); + + return deleteResult.deletedCount || 0; + }); + } + + /** + * Archive old documents within a transaction + */ + async archiveDocuments( + sourceCollection: CollectionNames, + archiveCollection: CollectionNames, + cutoffDate: Date, + batchSize: number = 1000 + ): Promise { + let totalArchived = 0; + + while (true) { + const batchArchived = await this.withTransaction(async (session) => { + const collection = this.client.getCollection(sourceCollection); + const archiveCol = this.client.getCollection(archiveCollection); + + // Find old documents + const documents = await collection.find( + { created_at: { $lt: cutoffDate } }, + { limit: batchSize, session } + ).toArray(); + + if (documents.length === 0) { + return 0; + } + + // Add archive metadata + const now = new Date(); + const documentsToArchive = documents.map(doc => ({ + ...doc, + archived_at: now, + archived_from: sourceCollection + })); + + // Insert into archive collection + await 
archiveCol.insertMany(documentsToArchive, { session }); + + // Remove from source collection + const ids = documents.map(doc => doc._id); + const deleteResult = await collection.deleteMany( + { _id: { $in: ids } }, + { session } + ); + + return deleteResult.deletedCount || 0; + }); + + totalArchived += batchArchived; + + if (batchArchived === 0) { + break; + } + + this.logger.debug(`Archived batch of ${batchArchived} documents`); + } + + this.logger.info(`Archived ${totalArchived} documents from ${sourceCollection} to ${archiveCollection}`); + return totalArchived; + } +} diff --git a/libs/mongodb-client/src/types.ts b/libs/mongodb-client/src/types.ts index a6e5195..fdc74fb 100644 --- a/libs/mongodb-client/src/types.ts +++ b/libs/mongodb-client/src/types.ts @@ -1,215 +1,215 @@ -import * as yup from 'yup'; -import type { ObjectId } from 'mongodb'; - -/** - * MongoDB Client Configuration - */ -export interface MongoDBClientConfig { - host: string; - port: number; - database: string; - username?: string; - password?: string; - authSource?: string; - uri?: string; - poolSettings?: { - maxPoolSize: number; - minPoolSize: number; - maxIdleTime: number; - }; - timeouts?: { - connectTimeout: number; - socketTimeout: number; - serverSelectionTimeout: number; - }; - tls?: { - enabled: boolean; - insecure: boolean; - caFile?: string; - }; - options?: { - retryWrites: boolean; - journal: boolean; - readPreference: 'primary' | 'primaryPreferred' | 'secondary' | 'secondaryPreferred' | 'nearest'; - writeConcern: string; - }; -} - -/** - * MongoDB Connection Options - */ -export interface MongoDBConnectionOptions { - retryAttempts?: number; - retryDelay?: number; - healthCheckInterval?: number; -} - -/** - * Health Status Types - */ -export type MongoDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy'; - -export interface MongoDBHealthCheck { - status: MongoDBHealthStatus; - timestamp: Date; - latency: number; - connections: { - active: number; - available: number; - total: 
number; - }; - errors?: string[]; -} - -export interface MongoDBMetrics { - operationsPerSecond: number; - averageLatency: number; - errorRate: number; - connectionPoolUtilization: number; - documentsProcessed: number; -} - -/** - * Collection Names - */ -export type CollectionNames = - | 'sentiment_data' - | 'raw_documents' - | 'news_articles' - | 'sec_filings' - | 'earnings_transcripts' - | 'analyst_reports' - | 'social_media_posts' - | 'market_events' - | 'economic_indicators'; - -/** - * Base Document Interface - */ -export interface DocumentBase { - _id?: ObjectId; - created_at: Date; - updated_at: Date; - source: string; - metadata?: Record; -} - -/** - * Sentiment Data Document - */ -export interface SentimentData extends DocumentBase { - symbol: string; - sentiment_score: number; - sentiment_label: 'positive' | 'negative' | 'neutral'; - confidence: number; - text: string; - source_type: 'reddit' | 'twitter' | 'news' | 'forums'; - source_id: string; - timestamp: Date; - processed_at: Date; - language: string; - keywords: string[]; - entities: Array<{ - name: string; - type: string; - confidence: number; - }>; -} - -/** - * Raw Document - */ -export interface RawDocument extends DocumentBase { - document_type: 'html' | 'pdf' | 'text' | 'json' | 'xml'; - content: string; - content_hash: string; - url?: string; - title?: string; - author?: string; - published_date?: Date; - extracted_text?: string; - processing_status: 'pending' | 'processed' | 'failed'; - size_bytes: number; - language?: string; -} - -/** - * News Article - */ -export interface NewsArticle extends DocumentBase { - headline: string; - content: string; - summary?: string; - author: string; - publication: string; - published_date: Date; - url: string; - symbols: string[]; - categories: string[]; - sentiment_score?: number; - relevance_score?: number; - image_url?: string; - tags: string[]; -} - -/** - * SEC Filing - */ -export interface SecFiling extends DocumentBase { - cik: string; - 
accession_number: string; - filing_type: string; - company_name: string; - symbols: string[]; - filing_date: Date; - period_end_date: Date; - url: string; - content: string; - extracted_data?: Record; - financial_statements?: Array<{ - statement_type: string; - data: Record; - }>; - processing_status: 'pending' | 'processed' | 'failed'; -} - -/** - * Earnings Transcript - */ -export interface EarningsTranscript extends DocumentBase { - symbol: string; - company_name: string; - quarter: string; - year: number; - call_date: Date; - transcript: string; - participants: Array<{ - name: string; - title: string; - type: 'executive' | 'analyst'; - }>; - key_topics: string[]; - sentiment_analysis?: { - overall_sentiment: number; - topic_sentiments: Record; - }; - financial_highlights?: Record; -} - -/** - * Analyst Report - */ -export interface AnalystReport extends DocumentBase { - symbol: string; - analyst_firm: string; - analyst_name: string; - report_title: string; - report_date: Date; - rating: 'buy' | 'hold' | 'sell' | 'strong_buy' | 'strong_sell'; - price_target?: number; - previous_rating?: string; - content: string; - summary: string; - key_points: string[]; - financial_projections?: Record; -} +import * as yup from 'yup'; +import type { ObjectId } from 'mongodb'; + +/** + * MongoDB Client Configuration + */ +export interface MongoDBClientConfig { + host: string; + port: number; + database: string; + username?: string; + password?: string; + authSource?: string; + uri?: string; + poolSettings?: { + maxPoolSize: number; + minPoolSize: number; + maxIdleTime: number; + }; + timeouts?: { + connectTimeout: number; + socketTimeout: number; + serverSelectionTimeout: number; + }; + tls?: { + enabled: boolean; + insecure: boolean; + caFile?: string; + }; + options?: { + retryWrites: boolean; + journal: boolean; + readPreference: 'primary' | 'primaryPreferred' | 'secondary' | 'secondaryPreferred' | 'nearest'; + writeConcern: string; + }; +} + +/** + * MongoDB Connection 
Options + */ +export interface MongoDBConnectionOptions { + retryAttempts?: number; + retryDelay?: number; + healthCheckInterval?: number; +} + +/** + * Health Status Types + */ +export type MongoDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy'; + +export interface MongoDBHealthCheck { + status: MongoDBHealthStatus; + timestamp: Date; + latency: number; + connections: { + active: number; + available: number; + total: number; + }; + errors?: string[]; +} + +export interface MongoDBMetrics { + operationsPerSecond: number; + averageLatency: number; + errorRate: number; + connectionPoolUtilization: number; + documentsProcessed: number; +} + +/** + * Collection Names + */ +export type CollectionNames = + | 'sentiment_data' + | 'raw_documents' + | 'news_articles' + | 'sec_filings' + | 'earnings_transcripts' + | 'analyst_reports' + | 'social_media_posts' + | 'market_events' + | 'economic_indicators'; + +/** + * Base Document Interface + */ +export interface DocumentBase { + _id?: ObjectId; + created_at: Date; + updated_at: Date; + source: string; + metadata?: Record; +} + +/** + * Sentiment Data Document + */ +export interface SentimentData extends DocumentBase { + symbol: string; + sentiment_score: number; + sentiment_label: 'positive' | 'negative' | 'neutral'; + confidence: number; + text: string; + source_type: 'reddit' | 'twitter' | 'news' | 'forums'; + source_id: string; + timestamp: Date; + processed_at: Date; + language: string; + keywords: string[]; + entities: Array<{ + name: string; + type: string; + confidence: number; + }>; +} + +/** + * Raw Document + */ +export interface RawDocument extends DocumentBase { + document_type: 'html' | 'pdf' | 'text' | 'json' | 'xml'; + content: string; + content_hash: string; + url?: string; + title?: string; + author?: string; + published_date?: Date; + extracted_text?: string; + processing_status: 'pending' | 'processed' | 'failed'; + size_bytes: number; + language?: string; +} + +/** + * News Article + */ +export 
interface NewsArticle extends DocumentBase { + headline: string; + content: string; + summary?: string; + author: string; + publication: string; + published_date: Date; + url: string; + symbols: string[]; + categories: string[]; + sentiment_score?: number; + relevance_score?: number; + image_url?: string; + tags: string[]; +} + +/** + * SEC Filing + */ +export interface SecFiling extends DocumentBase { + cik: string; + accession_number: string; + filing_type: string; + company_name: string; + symbols: string[]; + filing_date: Date; + period_end_date: Date; + url: string; + content: string; + extracted_data?: Record; + financial_statements?: Array<{ + statement_type: string; + data: Record; + }>; + processing_status: 'pending' | 'processed' | 'failed'; +} + +/** + * Earnings Transcript + */ +export interface EarningsTranscript extends DocumentBase { + symbol: string; + company_name: string; + quarter: string; + year: number; + call_date: Date; + transcript: string; + participants: Array<{ + name: string; + title: string; + type: 'executive' | 'analyst'; + }>; + key_topics: string[]; + sentiment_analysis?: { + overall_sentiment: number; + topic_sentiments: Record; + }; + financial_highlights?: Record; +} + +/** + * Analyst Report + */ +export interface AnalystReport extends DocumentBase { + symbol: string; + analyst_firm: string; + analyst_name: string; + report_title: string; + report_date: Date; + rating: 'buy' | 'hold' | 'sell' | 'strong_buy' | 'strong_sell'; + price_target?: number; + previous_rating?: string; + content: string; + summary: string; + key_points: string[]; + financial_projections?: Record; +} diff --git a/libs/mongodb-client/tsconfig.json b/libs/mongodb-client/tsconfig.json index 3030b42..e8f78e0 100644 --- a/libs/mongodb-client/tsconfig.json +++ b/libs/mongodb-client/tsconfig.json @@ -1,13 +1,13 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - 
"references": [ - { "path": "../types" }, - { "path": "../config" }, - { "path": "../logger" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../config" }, + { "path": "../logger" } + ] +} diff --git a/libs/mongodb-client/turbo.json b/libs/mongodb-client/turbo.json index 792d858..92c4460 100644 --- a/libs/mongodb-client/turbo.json +++ b/libs/mongodb-client/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build", "@stock-bot/logger#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build", "@stock-bot/logger#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/postgres-client/README.md b/libs/postgres-client/README.md index df75162..ad2abab 100644 --- a/libs/postgres-client/README.md +++ b/libs/postgres-client/README.md @@ -1,82 +1,82 @@ -# PostgreSQL Client Library - -A comprehensive PostgreSQL client library for the Stock Bot trading platform, designed for operational data, transactions, and relational queries. 
- -## Features - -- **Connection Pooling**: Robust connection pool management -- **Type Safety**: Full TypeScript support with typed queries -- **Transaction Support**: Multi-statement transactions with rollback -- **Schema Management**: Database schema validation and migrations -- **Query Builder**: Fluent query building interface -- **Health Monitoring**: Connection health monitoring and metrics -- **Performance Tracking**: Query performance monitoring and optimization - -## Usage - -```typescript -import { PostgreSQLClient } from '@stock-bot/postgres-client'; - -// Initialize client -const pgClient = new PostgreSQLClient(); -await pgClient.connect(); - -// Execute a query -const users = await pgClient.query('SELECT * FROM users WHERE active = $1', [true]); - -// Use query builder -const trades = await pgClient - .select('*') - .from('trades') - .where('symbol', '=', 'AAPL') - .orderBy('created_at', 'DESC') - .limit(10) - .execute(); - -// Execute in transaction -await pgClient.transaction(async (tx) => { - await tx.query('INSERT INTO trades (...) VALUES (...)', []); - await tx.query('UPDATE portfolio SET balance = balance - $1', [amount]); -}); -``` - -## Database Schemas - -The client provides typed access to the following schemas: - -- **trading**: Core trading operations (trades, orders, positions) -- **strategy**: Strategy definitions and performance -- **risk**: Risk management and compliance -- **audit**: Audit trails and logging - -## Configuration - -Configure using environment variables: - -```env -POSTGRES_HOST=localhost -POSTGRES_PORT=5432 -POSTGRES_DATABASE=stockbot -POSTGRES_USERNAME=stockbot -POSTGRES_PASSWORD=your_password -``` - -## Query Builder - -The fluent query builder supports: - -- SELECT, INSERT, UPDATE, DELETE operations -- Complex WHERE conditions with AND/OR logic -- JOINs (INNER, LEFT, RIGHT, FULL) -- Aggregations (COUNT, SUM, AVG, etc.) 
-- Subqueries and CTEs -- Window functions - -## Health Monitoring - -The client includes built-in health monitoring: - -```typescript -const health = await pgClient.getHealth(); -console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy' -``` +# PostgreSQL Client Library + +A comprehensive PostgreSQL client library for the Stock Bot trading platform, designed for operational data, transactions, and relational queries. + +## Features + +- **Connection Pooling**: Robust connection pool management +- **Type Safety**: Full TypeScript support with typed queries +- **Transaction Support**: Multi-statement transactions with rollback +- **Schema Management**: Database schema validation and migrations +- **Query Builder**: Fluent query building interface +- **Health Monitoring**: Connection health monitoring and metrics +- **Performance Tracking**: Query performance monitoring and optimization + +## Usage + +```typescript +import { PostgreSQLClient } from '@stock-bot/postgres-client'; + +// Initialize client +const pgClient = new PostgreSQLClient(); +await pgClient.connect(); + +// Execute a query +const users = await pgClient.query('SELECT * FROM users WHERE active = $1', [true]); + +// Use query builder +const trades = await pgClient + .select('*') + .from('trades') + .where('symbol', '=', 'AAPL') + .orderBy('created_at', 'DESC') + .limit(10) + .execute(); + +// Execute in transaction +await pgClient.transaction(async (tx) => { + await tx.query('INSERT INTO trades (...) 
VALUES (...)', []); + await tx.query('UPDATE portfolio SET balance = balance - $1', [amount]); +}); +``` + +## Database Schemas + +The client provides typed access to the following schemas: + +- **trading**: Core trading operations (trades, orders, positions) +- **strategy**: Strategy definitions and performance +- **risk**: Risk management and compliance +- **audit**: Audit trails and logging + +## Configuration + +Configure using environment variables: + +```env +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 +POSTGRES_DATABASE=stockbot +POSTGRES_USERNAME=stockbot +POSTGRES_PASSWORD=your_password +``` + +## Query Builder + +The fluent query builder supports: + +- SELECT, INSERT, UPDATE, DELETE operations +- Complex WHERE conditions with AND/OR logic +- JOINs (INNER, LEFT, RIGHT, FULL) +- Aggregations (COUNT, SUM, AVG, etc.) +- Subqueries and CTEs +- Window functions + +## Health Monitoring + +The client includes built-in health monitoring: + +```typescript +const health = await pgClient.getHealth(); +console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy' +``` diff --git a/libs/postgres-client/package.json b/libs/postgres-client/package.json index 5697d5d..ef5261d 100644 --- a/libs/postgres-client/package.json +++ b/libs/postgres-client/package.json @@ -1,47 +1,47 @@ -{ - "name": "@stock-bot/postgres-client", - "version": "1.0.0", - "description": "PostgreSQL client library for Stock Bot platform", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "test": "bun test", - "lint": "eslint src/**/*.ts", - "type-check": "tsc --noEmit", - "clean": "rimraf dist" - }, - "dependencies": { "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": "*", - "pg": "^8.11.3", - "yup": "^1.6.1" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "@types/pg": "^8.10.7", - "typescript": "^5.3.0", - "eslint": "^8.56.0", - "@typescript-eslint/eslint-plugin": "^6.19.0", - 
"@typescript-eslint/parser": "^6.19.0", - "bun-types": "^1.2.15" - }, - "keywords": [ - "postgresql", - "database", - "client", - "stock-bot" - ], - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ] -} +{ + "name": "@stock-bot/postgres-client", + "version": "1.0.0", + "description": "PostgreSQL client library for Stock Bot platform", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsc", + "test": "bun test", + "lint": "eslint src/**/*.ts", + "type-check": "tsc --noEmit", + "clean": "rimraf dist" + }, + "dependencies": { "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*", + "pg": "^8.11.3", + "yup": "^1.6.1" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "@types/pg": "^8.10.7", + "typescript": "^5.3.0", + "eslint": "^8.56.0", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": "^6.19.0", + "bun-types": "^1.2.15" + }, + "keywords": [ + "postgresql", + "database", + "client", + "stock-bot" + ], + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist", + "README.md" + ] +} diff --git a/libs/postgres-client/src/client.ts b/libs/postgres-client/src/client.ts index f1b1a68..f58f1f0 100644 --- a/libs/postgres-client/src/client.ts +++ b/libs/postgres-client/src/client.ts @@ -1,339 +1,339 @@ -import { Pool, PoolClient, QueryResult as PgQueryResult, QueryResultRow } from 'pg'; -import { postgresConfig } from '@stock-bot/config'; -import { getLogger } from '@stock-bot/logger'; -import type { - PostgreSQLClientConfig, - PostgreSQLConnectionOptions, - QueryResult, - TransactionCallback -} from './types'; -import { PostgreSQLHealthMonitor } from './health'; -import { PostgreSQLQueryBuilder } from './query-builder'; -import { PostgreSQLTransactionManager 
} from './transactions'; - -/** - * PostgreSQL Client for Stock Bot - * - * Provides type-safe access to PostgreSQL with connection pooling, - * health monitoring, and transaction support. - */ -export class PostgreSQLClient { - private pool: Pool | null = null; - private readonly config: PostgreSQLClientConfig; - private readonly options: PostgreSQLConnectionOptions; - private readonly logger: ReturnType; - private readonly healthMonitor: PostgreSQLHealthMonitor; - private readonly transactionManager: PostgreSQLTransactionManager; - private isConnected = false; - - constructor( - config?: Partial, - options?: PostgreSQLConnectionOptions - ) { - this.config = this.buildConfig(config); - this.options = { - retryAttempts: 3, - retryDelay: 1000, - healthCheckInterval: 30000, - ...options - }; - - this.logger = getLogger('postgres-client'); - this.healthMonitor = new PostgreSQLHealthMonitor(this); - this.transactionManager = new PostgreSQLTransactionManager(this); - } - - /** - * Connect to PostgreSQL - */ - async connect(): Promise { - if (this.isConnected && this.pool) { - return; - } - - let lastError: Error | null = null; - - for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) { - try { - this.logger.info(`Connecting to PostgreSQL (attempt ${attempt}/${this.options.retryAttempts})...`); - - this.pool = new Pool(this.buildPoolConfig()); - - // Test the connection - const client = await this.pool.connect(); - await client.query('SELECT 1'); - client.release(); - - this.isConnected = true; - this.logger.info('Successfully connected to PostgreSQL'); - - // Start health monitoring - this.healthMonitor.start(); - - // Setup error handlers - this.setupErrorHandlers(); - - return; - } catch (error) { - lastError = error as Error; - this.logger.error(`PostgreSQL connection attempt ${attempt} failed:`, error); - - if (this.pool) { - await this.pool.end(); - this.pool = null; - } - - if (attempt < this.options.retryAttempts!) 
{ - await this.delay(this.options.retryDelay! * attempt); - } - } - } - - throw new Error(`Failed to connect to PostgreSQL after ${this.options.retryAttempts} attempts: ${lastError?.message}`); - } - - /** - * Disconnect from PostgreSQL - */ - async disconnect(): Promise { - if (!this.pool) { - return; - } - - try { - this.healthMonitor.stop(); - await this.pool.end(); - this.isConnected = false; - this.pool = null; - this.logger.info('Disconnected from PostgreSQL'); - } catch (error) { - this.logger.error('Error disconnecting from PostgreSQL:', error); - throw error; - } - } - - /** - * Execute a query - */ - async query(text: string, params?: any[]): Promise> { - if (!this.pool) { - throw new Error('PostgreSQL client not connected'); - } - - const startTime = Date.now(); - - try { - const result = await this.pool.query(text, params); - const executionTime = Date.now() - startTime; - - this.logger.debug(`Query executed in ${executionTime}ms`, { - query: text.substring(0, 100), - params: params?.length - }); - - return { - ...result, - executionTime - } as QueryResult; - } catch (error) { - const executionTime = Date.now() - startTime; - this.logger.error(`Query failed after ${executionTime}ms:`, { - error, - query: text, - params - }); - throw error; - } - } - - /** - * Execute multiple queries in a transaction - */ - async transaction(callback: TransactionCallback): Promise { - return await this.transactionManager.execute(callback); - } - - /** - * Get a query builder instance - */ - queryBuilder(): PostgreSQLQueryBuilder { - return new PostgreSQLQueryBuilder(this); - } - - /** - * Create a new query builder with SELECT - */ - select(columns: string | string[] = '*'): PostgreSQLQueryBuilder { - return this.queryBuilder().select(columns); - } - - /** - * Create a new query builder with INSERT - */ - insert(table: string): PostgreSQLQueryBuilder { - return this.queryBuilder().insert(table); - } - - /** - * Create a new query builder with UPDATE - */ - update(table: 
string): PostgreSQLQueryBuilder { - return this.queryBuilder().update(table); - } - - /** - * Create a new query builder with DELETE - */ - delete(table: string): PostgreSQLQueryBuilder { - return this.queryBuilder().delete(table); - } - - /** - * Execute a stored procedure or function - */ - async callFunction(functionName: string, params?: any[]): Promise> { - const placeholders = params ? params.map((_, i) => `$${i + 1}`).join(', ') : ''; - const query = `SELECT * FROM ${functionName}(${placeholders})`; - return await this.query(query, params); - } - - /** - * Check if a table exists - */ - async tableExists(tableName: string, schemaName: string = 'public'): Promise { - const result = await this.query( - `SELECT EXISTS ( - SELECT FROM information_schema.tables - WHERE table_schema = $1 AND table_name = $2 - )`, - [schemaName, tableName] - ); - return result.rows[0].exists; - } - - /** - * Get table schema information - */ - async getTableSchema(tableName: string, schemaName: string = 'public'): Promise { - const result = await this.query( - `SELECT - column_name, - data_type, - is_nullable, - column_default, - character_maximum_length - FROM information_schema.columns - WHERE table_schema = $1 AND table_name = $2 - ORDER BY ordinal_position`, - [schemaName, tableName] - ); - return result.rows; - } - - /** - * Execute EXPLAIN for query analysis - */ - async explain(query: string, params?: any[]): Promise { - const explainQuery = `EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON) ${query}`; - const result = await this.query(explainQuery, params); - return result.rows[0]['QUERY PLAN']; - } - - /** - * Get database statistics - */ - async getStats(): Promise { - const result = await this.query(` - SELECT - (SELECT count(*) FROM pg_stat_activity WHERE state = 'active') as active_connections, - (SELECT count(*) FROM pg_stat_activity WHERE state = 'idle') as idle_connections, - (SELECT setting FROM pg_settings WHERE name = 'max_connections') as max_connections, - 
pg_size_pretty(pg_database_size(current_database())) as database_size - `); - return result.rows[0]; - } - - /** - * Check if client is connected - */ - get connected(): boolean { - return this.isConnected && !!this.pool; - } - - /** - * Get the underlying connection pool - */ - get connectionPool(): Pool | null { - return this.pool; - } - - private buildConfig(config?: Partial): PostgreSQLClientConfig { - return { - host: config?.host || postgresConfig.POSTGRES_HOST, - port: config?.port || postgresConfig.POSTGRES_PORT, - database: config?.database || postgresConfig.POSTGRES_DATABASE, - username: config?.username || postgresConfig.POSTGRES_USERNAME, - password: config?.password || postgresConfig.POSTGRES_PASSWORD, - poolSettings: { - min: postgresConfig.POSTGRES_POOL_MIN, - max: postgresConfig.POSTGRES_POOL_MAX, - idleTimeoutMillis: postgresConfig.POSTGRES_POOL_IDLE_TIMEOUT, - ...config?.poolSettings - }, - ssl: { - enabled: postgresConfig.POSTGRES_SSL, - rejectUnauthorized: postgresConfig.POSTGRES_SSL_REJECT_UNAUTHORIZED, - ...config?.ssl - }, - timeouts: { - query: postgresConfig.POSTGRES_QUERY_TIMEOUT, - connection: postgresConfig.POSTGRES_CONNECTION_TIMEOUT, - statement: postgresConfig.POSTGRES_STATEMENT_TIMEOUT, - lock: postgresConfig.POSTGRES_LOCK_TIMEOUT, - idleInTransaction: postgresConfig.POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT, - ...config?.timeouts - } - }; - } - - private buildPoolConfig(): any { - return { - host: this.config.host, - port: this.config.port, - database: this.config.database, - user: this.config.username, - password: this.config.password, - min: this.config.poolSettings?.min, - max: this.config.poolSettings?.max, - idleTimeoutMillis: this.config.poolSettings?.idleTimeoutMillis, - connectionTimeoutMillis: this.config.timeouts?.connection, - query_timeout: this.config.timeouts?.query, - statement_timeout: this.config.timeouts?.statement, - lock_timeout: this.config.timeouts?.lock, - idle_in_transaction_session_timeout: 
this.config.timeouts?.idleInTransaction, - ssl: this.config.ssl?.enabled ? { - rejectUnauthorized: this.config.ssl.rejectUnauthorized - } : false - }; - } - - private setupErrorHandlers(): void { - if (!this.pool) return; - - this.pool.on('error', (error) => { - this.logger.error('PostgreSQL pool error:', error); - }); - - this.pool.on('connect', () => { - this.logger.debug('New PostgreSQL client connected'); - }); - - this.pool.on('remove', () => { - this.logger.debug('PostgreSQL client removed from pool'); - }); - } - - private delay(ms: number): Promise { - return new Promise(resolve => setTimeout(resolve, ms)); - } -} +import { Pool, PoolClient, QueryResult as PgQueryResult, QueryResultRow } from 'pg'; +import { postgresConfig } from '@stock-bot/config'; +import { getLogger } from '@stock-bot/logger'; +import type { + PostgreSQLClientConfig, + PostgreSQLConnectionOptions, + QueryResult, + TransactionCallback +} from './types'; +import { PostgreSQLHealthMonitor } from './health'; +import { PostgreSQLQueryBuilder } from './query-builder'; +import { PostgreSQLTransactionManager } from './transactions'; + +/** + * PostgreSQL Client for Stock Bot + * + * Provides type-safe access to PostgreSQL with connection pooling, + * health monitoring, and transaction support. 
+ */ +export class PostgreSQLClient { + private pool: Pool | null = null; + private readonly config: PostgreSQLClientConfig; + private readonly options: PostgreSQLConnectionOptions; + private readonly logger: ReturnType; + private readonly healthMonitor: PostgreSQLHealthMonitor; + private readonly transactionManager: PostgreSQLTransactionManager; + private isConnected = false; + + constructor( + config?: Partial, + options?: PostgreSQLConnectionOptions + ) { + this.config = this.buildConfig(config); + this.options = { + retryAttempts: 3, + retryDelay: 1000, + healthCheckInterval: 30000, + ...options + }; + + this.logger = getLogger('postgres-client'); + this.healthMonitor = new PostgreSQLHealthMonitor(this); + this.transactionManager = new PostgreSQLTransactionManager(this); + } + + /** + * Connect to PostgreSQL + */ + async connect(): Promise { + if (this.isConnected && this.pool) { + return; + } + + let lastError: Error | null = null; + + for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) { + try { + this.logger.info(`Connecting to PostgreSQL (attempt ${attempt}/${this.options.retryAttempts})...`); + + this.pool = new Pool(this.buildPoolConfig()); + + // Test the connection + const client = await this.pool.connect(); + await client.query('SELECT 1'); + client.release(); + + this.isConnected = true; + this.logger.info('Successfully connected to PostgreSQL'); + + // Start health monitoring + this.healthMonitor.start(); + + // Setup error handlers + this.setupErrorHandlers(); + + return; + } catch (error) { + lastError = error as Error; + this.logger.error(`PostgreSQL connection attempt ${attempt} failed:`, error); + + if (this.pool) { + await this.pool.end(); + this.pool = null; + } + + if (attempt < this.options.retryAttempts!) { + await this.delay(this.options.retryDelay! 
* attempt); + } + } + } + + throw new Error(`Failed to connect to PostgreSQL after ${this.options.retryAttempts} attempts: ${lastError?.message}`); + } + + /** + * Disconnect from PostgreSQL + */ + async disconnect(): Promise { + if (!this.pool) { + return; + } + + try { + this.healthMonitor.stop(); + await this.pool.end(); + this.isConnected = false; + this.pool = null; + this.logger.info('Disconnected from PostgreSQL'); + } catch (error) { + this.logger.error('Error disconnecting from PostgreSQL:', error); + throw error; + } + } + + /** + * Execute a query + */ + async query(text: string, params?: any[]): Promise> { + if (!this.pool) { + throw new Error('PostgreSQL client not connected'); + } + + const startTime = Date.now(); + + try { + const result = await this.pool.query(text, params); + const executionTime = Date.now() - startTime; + + this.logger.debug(`Query executed in ${executionTime}ms`, { + query: text.substring(0, 100), + params: params?.length + }); + + return { + ...result, + executionTime + } as QueryResult; + } catch (error) { + const executionTime = Date.now() - startTime; + this.logger.error(`Query failed after ${executionTime}ms:`, { + error, + query: text, + params + }); + throw error; + } + } + + /** + * Execute multiple queries in a transaction + */ + async transaction(callback: TransactionCallback): Promise { + return await this.transactionManager.execute(callback); + } + + /** + * Get a query builder instance + */ + queryBuilder(): PostgreSQLQueryBuilder { + return new PostgreSQLQueryBuilder(this); + } + + /** + * Create a new query builder with SELECT + */ + select(columns: string | string[] = '*'): PostgreSQLQueryBuilder { + return this.queryBuilder().select(columns); + } + + /** + * Create a new query builder with INSERT + */ + insert(table: string): PostgreSQLQueryBuilder { + return this.queryBuilder().insert(table); + } + + /** + * Create a new query builder with UPDATE + */ + update(table: string): PostgreSQLQueryBuilder { + return 
this.queryBuilder().update(table); + } + + /** + * Create a new query builder with DELETE + */ + delete(table: string): PostgreSQLQueryBuilder { + return this.queryBuilder().delete(table); + } + + /** + * Execute a stored procedure or function + */ + async callFunction(functionName: string, params?: any[]): Promise> { + const placeholders = params ? params.map((_, i) => `$${i + 1}`).join(', ') : ''; + const query = `SELECT * FROM ${functionName}(${placeholders})`; + return await this.query(query, params); + } + + /** + * Check if a table exists + */ + async tableExists(tableName: string, schemaName: string = 'public'): Promise { + const result = await this.query( + `SELECT EXISTS ( + SELECT FROM information_schema.tables + WHERE table_schema = $1 AND table_name = $2 + )`, + [schemaName, tableName] + ); + return result.rows[0].exists; + } + + /** + * Get table schema information + */ + async getTableSchema(tableName: string, schemaName: string = 'public'): Promise { + const result = await this.query( + `SELECT + column_name, + data_type, + is_nullable, + column_default, + character_maximum_length + FROM information_schema.columns + WHERE table_schema = $1 AND table_name = $2 + ORDER BY ordinal_position`, + [schemaName, tableName] + ); + return result.rows; + } + + /** + * Execute EXPLAIN for query analysis + */ + async explain(query: string, params?: any[]): Promise { + const explainQuery = `EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON) ${query}`; + const result = await this.query(explainQuery, params); + return result.rows[0]['QUERY PLAN']; + } + + /** + * Get database statistics + */ + async getStats(): Promise { + const result = await this.query(` + SELECT + (SELECT count(*) FROM pg_stat_activity WHERE state = 'active') as active_connections, + (SELECT count(*) FROM pg_stat_activity WHERE state = 'idle') as idle_connections, + (SELECT setting FROM pg_settings WHERE name = 'max_connections') as max_connections, + pg_size_pretty(pg_database_size(current_database())) as 
database_size + `); + return result.rows[0]; + } + + /** + * Check if client is connected + */ + get connected(): boolean { + return this.isConnected && !!this.pool; + } + + /** + * Get the underlying connection pool + */ + get connectionPool(): Pool | null { + return this.pool; + } + + private buildConfig(config?: Partial): PostgreSQLClientConfig { + return { + host: config?.host || postgresConfig.POSTGRES_HOST, + port: config?.port || postgresConfig.POSTGRES_PORT, + database: config?.database || postgresConfig.POSTGRES_DATABASE, + username: config?.username || postgresConfig.POSTGRES_USERNAME, + password: config?.password || postgresConfig.POSTGRES_PASSWORD, + poolSettings: { + min: postgresConfig.POSTGRES_POOL_MIN, + max: postgresConfig.POSTGRES_POOL_MAX, + idleTimeoutMillis: postgresConfig.POSTGRES_POOL_IDLE_TIMEOUT, + ...config?.poolSettings + }, + ssl: { + enabled: postgresConfig.POSTGRES_SSL, + rejectUnauthorized: postgresConfig.POSTGRES_SSL_REJECT_UNAUTHORIZED, + ...config?.ssl + }, + timeouts: { + query: postgresConfig.POSTGRES_QUERY_TIMEOUT, + connection: postgresConfig.POSTGRES_CONNECTION_TIMEOUT, + statement: postgresConfig.POSTGRES_STATEMENT_TIMEOUT, + lock: postgresConfig.POSTGRES_LOCK_TIMEOUT, + idleInTransaction: postgresConfig.POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT, + ...config?.timeouts + } + }; + } + + private buildPoolConfig(): any { + return { + host: this.config.host, + port: this.config.port, + database: this.config.database, + user: this.config.username, + password: this.config.password, + min: this.config.poolSettings?.min, + max: this.config.poolSettings?.max, + idleTimeoutMillis: this.config.poolSettings?.idleTimeoutMillis, + connectionTimeoutMillis: this.config.timeouts?.connection, + query_timeout: this.config.timeouts?.query, + statement_timeout: this.config.timeouts?.statement, + lock_timeout: this.config.timeouts?.lock, + idle_in_transaction_session_timeout: this.config.timeouts?.idleInTransaction, + ssl: 
this.config.ssl?.enabled ? { + rejectUnauthorized: this.config.ssl.rejectUnauthorized + } : false + }; + } + + private setupErrorHandlers(): void { + if (!this.pool) return; + + this.pool.on('error', (error) => { + this.logger.error('PostgreSQL pool error:', error); + }); + + this.pool.on('connect', () => { + this.logger.debug('New PostgreSQL client connected'); + }); + + this.pool.on('remove', () => { + this.logger.debug('PostgreSQL client removed from pool'); + }); + } + + private delay(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)); + } +} diff --git a/libs/postgres-client/src/factory.ts b/libs/postgres-client/src/factory.ts index 5b097bd..a202ad7 100644 --- a/libs/postgres-client/src/factory.ts +++ b/libs/postgres-client/src/factory.ts @@ -1,64 +1,64 @@ -import { PostgreSQLClient } from './client'; -import { postgresConfig } from '@stock-bot/config'; -import type { PostgreSQLClientConfig, PostgreSQLConnectionOptions } from './types'; - -/** - * Factory function to create a PostgreSQL client instance - */ -export function createPostgreSQLClient( - config?: Partial, - options?: PostgreSQLConnectionOptions -): PostgreSQLClient { - return new PostgreSQLClient(config, options); -} - -/** - * Create a PostgreSQL client with default configuration - */ -export function createDefaultPostgreSQLClient(): PostgreSQLClient { - const config: Partial = { - host: postgresConfig.POSTGRES_HOST, - port: postgresConfig.POSTGRES_PORT, - database: postgresConfig.POSTGRES_DATABASE, - username: postgresConfig.POSTGRES_USERNAME, - password: postgresConfig.POSTGRES_PASSWORD - }; - - return new PostgreSQLClient(config); -} - -/** - * Singleton PostgreSQL client instance - */ -let defaultClient: PostgreSQLClient | null = null; - -/** - * Get or create the default PostgreSQL client instance - */ -export function getPostgreSQLClient(): PostgreSQLClient { - if (!defaultClient) { - defaultClient = createDefaultPostgreSQLClient(); - } - return defaultClient; -} 
- -/** - * Connect to PostgreSQL using the default client - */ -export async function connectPostgreSQL(): Promise { - const client = getPostgreSQLClient(); - if (!client.connected) { - await client.connect(); - } - return client; -} - -/** - * Disconnect from PostgreSQL - */ -export async function disconnectPostgreSQL(): Promise { - if (defaultClient) { - await defaultClient.disconnect(); - defaultClient = null; - } -} +import { PostgreSQLClient } from './client'; +import { postgresConfig } from '@stock-bot/config'; +import type { PostgreSQLClientConfig, PostgreSQLConnectionOptions } from './types'; + +/** + * Factory function to create a PostgreSQL client instance + */ +export function createPostgreSQLClient( + config?: Partial, + options?: PostgreSQLConnectionOptions +): PostgreSQLClient { + return new PostgreSQLClient(config, options); +} + +/** + * Create a PostgreSQL client with default configuration + */ +export function createDefaultPostgreSQLClient(): PostgreSQLClient { + const config: Partial = { + host: postgresConfig.POSTGRES_HOST, + port: postgresConfig.POSTGRES_PORT, + database: postgresConfig.POSTGRES_DATABASE, + username: postgresConfig.POSTGRES_USERNAME, + password: postgresConfig.POSTGRES_PASSWORD + }; + + return new PostgreSQLClient(config); +} + +/** + * Singleton PostgreSQL client instance + */ +let defaultClient: PostgreSQLClient | null = null; + +/** + * Get or create the default PostgreSQL client instance + */ +export function getPostgreSQLClient(): PostgreSQLClient { + if (!defaultClient) { + defaultClient = createDefaultPostgreSQLClient(); + } + return defaultClient; +} + +/** + * Connect to PostgreSQL using the default client + */ +export async function connectPostgreSQL(): Promise { + const client = getPostgreSQLClient(); + if (!client.connected) { + await client.connect(); + } + return client; +} + +/** + * Disconnect from PostgreSQL + */ +export async function disconnectPostgreSQL(): Promise { + if (defaultClient) { + await 
defaultClient.disconnect(); + defaultClient = null; + } +} diff --git a/libs/postgres-client/src/health.ts b/libs/postgres-client/src/health.ts index bfd4f3f..61353a6 100644 --- a/libs/postgres-client/src/health.ts +++ b/libs/postgres-client/src/health.ts @@ -1,142 +1,142 @@ -import { getLogger } from '@stock-bot/logger'; -import type { PostgreSQLClient } from './client'; -import type { PostgreSQLHealthCheck, PostgreSQLHealthStatus, PostgreSQLMetrics } from './types'; - -/** - * PostgreSQL Health Monitor - * - * Monitors PostgreSQL connection health and provides metrics - */ -export class PostgreSQLHealthMonitor { - private readonly client: PostgreSQLClient; - private readonly logger: ReturnType; - private healthCheckInterval: NodeJS.Timeout | null = null; - private metrics: PostgreSQLMetrics; - private lastHealthCheck: PostgreSQLHealthCheck | null = null; - - constructor(client: PostgreSQLClient) { - this.client = client; - this.logger = getLogger('postgres-health-monitor'); - this.metrics = { - queriesPerSecond: 0, - averageQueryTime: 0, - errorRate: 0, - connectionPoolUtilization: 0, - slowQueries: 0 - }; - } - - /** - * Start health monitoring - */ - start(intervalMs: number = 30000): void { - if (this.healthCheckInterval) { - this.stop(); - } - - this.logger.info(`Starting PostgreSQL health monitoring (interval: ${intervalMs}ms)`); - - this.healthCheckInterval = setInterval(async () => { - try { - await this.performHealthCheck(); - } catch (error) { - this.logger.error('Health check failed:', error); - } - }, intervalMs); - - // Perform initial health check - this.performHealthCheck().catch(error => { - this.logger.error('Initial health check failed:', error); - }); - } - - /** - * Stop health monitoring - */ - stop(): void { - if (this.healthCheckInterval) { - clearInterval(this.healthCheckInterval); - this.healthCheckInterval = null; - this.logger.info('Stopped PostgreSQL health monitoring'); - } - } - - /** - * Get current health status - */ - async 
getHealth(): Promise { - if (!this.lastHealthCheck) { - await this.performHealthCheck(); - } - return this.lastHealthCheck!; - } - - /** - * Get current metrics - */ - getMetrics(): PostgreSQLMetrics { - return { ...this.metrics }; - } - - /** - * Perform a health check - */ - private async performHealthCheck(): Promise { - const startTime = Date.now(); - const errors: string[] = []; - let status: PostgreSQLHealthStatus = 'healthy'; - - try { - if (!this.client.connected) { - errors.push('PostgreSQL client not connected'); - status = 'unhealthy'; - } else { - // Test basic connectivity - await this.client.query('SELECT 1'); - - // Get connection stats - const stats = await this.client.getStats(); - - // Check connection pool utilization - const utilization = parseInt(stats.active_connections) / parseInt(stats.max_connections); - if (utilization > 0.8) { - errors.push('High connection pool utilization'); - status = status === 'healthy' ? 'degraded' : status; - } - - // Check for high latency - const latency = Date.now() - startTime; - if (latency > 1000) { - errors.push(`High latency: ${latency}ms`); - status = status === 'healthy' ? 'degraded' : status; - } - - this.metrics.connectionPoolUtilization = utilization; - } - } catch (error) { - errors.push(`Health check failed: ${(error as Error).message}`); - status = 'unhealthy'; - } - - const latency = Date.now() - startTime; - - this.lastHealthCheck = { - status, - timestamp: new Date(), - latency, - connections: { - active: 1, - idle: 9, - total: 10 - }, - errors: errors.length > 0 ? 
errors : undefined - }; - - // Log health status changes - if (status !== 'healthy') { - this.logger.warn(`PostgreSQL health status: ${status}`, { errors, latency }); - } else { - this.logger.debug(`PostgreSQL health check passed (${latency}ms)`); - } - } -} +import { getLogger } from '@stock-bot/logger'; +import type { PostgreSQLClient } from './client'; +import type { PostgreSQLHealthCheck, PostgreSQLHealthStatus, PostgreSQLMetrics } from './types'; + +/** + * PostgreSQL Health Monitor + * + * Monitors PostgreSQL connection health and provides metrics + */ +export class PostgreSQLHealthMonitor { + private readonly client: PostgreSQLClient; + private readonly logger: ReturnType; + private healthCheckInterval: NodeJS.Timeout | null = null; + private metrics: PostgreSQLMetrics; + private lastHealthCheck: PostgreSQLHealthCheck | null = null; + + constructor(client: PostgreSQLClient) { + this.client = client; + this.logger = getLogger('postgres-health-monitor'); + this.metrics = { + queriesPerSecond: 0, + averageQueryTime: 0, + errorRate: 0, + connectionPoolUtilization: 0, + slowQueries: 0 + }; + } + + /** + * Start health monitoring + */ + start(intervalMs: number = 30000): void { + if (this.healthCheckInterval) { + this.stop(); + } + + this.logger.info(`Starting PostgreSQL health monitoring (interval: ${intervalMs}ms)`); + + this.healthCheckInterval = setInterval(async () => { + try { + await this.performHealthCheck(); + } catch (error) { + this.logger.error('Health check failed:', error); + } + }, intervalMs); + + // Perform initial health check + this.performHealthCheck().catch(error => { + this.logger.error('Initial health check failed:', error); + }); + } + + /** + * Stop health monitoring + */ + stop(): void { + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval); + this.healthCheckInterval = null; + this.logger.info('Stopped PostgreSQL health monitoring'); + } + } + + /** + * Get current health status + */ + async getHealth(): Promise { + 
if (!this.lastHealthCheck) { + await this.performHealthCheck(); + } + return this.lastHealthCheck!; + } + + /** + * Get current metrics + */ + getMetrics(): PostgreSQLMetrics { + return { ...this.metrics }; + } + + /** + * Perform a health check + */ + private async performHealthCheck(): Promise { + const startTime = Date.now(); + const errors: string[] = []; + let status: PostgreSQLHealthStatus = 'healthy'; + + try { + if (!this.client.connected) { + errors.push('PostgreSQL client not connected'); + status = 'unhealthy'; + } else { + // Test basic connectivity + await this.client.query('SELECT 1'); + + // Get connection stats + const stats = await this.client.getStats(); + + // Check connection pool utilization + const utilization = parseInt(stats.active_connections) / parseInt(stats.max_connections); + if (utilization > 0.8) { + errors.push('High connection pool utilization'); + status = status === 'healthy' ? 'degraded' : status; + } + + // Check for high latency + const latency = Date.now() - startTime; + if (latency > 1000) { + errors.push(`High latency: ${latency}ms`); + status = status === 'healthy' ? 'degraded' : status; + } + + this.metrics.connectionPoolUtilization = utilization; + } + } catch (error) { + errors.push(`Health check failed: ${(error as Error).message}`); + status = 'unhealthy'; + } + + const latency = Date.now() - startTime; + + this.lastHealthCheck = { + status, + timestamp: new Date(), + latency, + connections: { + active: 1, + idle: 9, + total: 10 + }, + errors: errors.length > 0 ? 
errors : undefined + }; + + // Log health status changes + if (status !== 'healthy') { + this.logger.warn(`PostgreSQL health status: ${status}`, { errors, latency }); + } else { + this.logger.debug(`PostgreSQL health check passed (${latency}ms)`); + } + } +} diff --git a/libs/postgres-client/src/index.ts b/libs/postgres-client/src/index.ts index 8e5ce2d..4867c0e 100644 --- a/libs/postgres-client/src/index.ts +++ b/libs/postgres-client/src/index.ts @@ -1,34 +1,34 @@ -/** - * PostgreSQL Client Library for Stock Bot - * - * Provides type-safe PostgreSQL access for operational data, - * transactions, and relational queries. - */ - -export { PostgreSQLClient } from './client'; -export { PostgreSQLHealthMonitor } from './health'; -export { PostgreSQLTransactionManager } from './transactions'; -export { PostgreSQLQueryBuilder } from './query-builder'; -// export { PostgreSQLMigrationManager } from './migrations'; // TODO: Implement migrations - -// Types -export type { - PostgreSQLClientConfig, - PostgreSQLConnectionOptions, - PostgreSQLHealthStatus, - PostgreSQLMetrics, - QueryResult, - TransactionCallback, - SchemaNames, - TableNames, - Trade, - Order, - Position, - Portfolio, - Strategy, - RiskLimit, - AuditLog -} from './types'; - -// Utils -export { createPostgreSQLClient, getPostgreSQLClient } from './factory'; +/** + * PostgreSQL Client Library for Stock Bot + * + * Provides type-safe PostgreSQL access for operational data, + * transactions, and relational queries. 
+ */ + +export { PostgreSQLClient } from './client'; +export { PostgreSQLHealthMonitor } from './health'; +export { PostgreSQLTransactionManager } from './transactions'; +export { PostgreSQLQueryBuilder } from './query-builder'; +// export { PostgreSQLMigrationManager } from './migrations'; // TODO: Implement migrations + +// Types +export type { + PostgreSQLClientConfig, + PostgreSQLConnectionOptions, + PostgreSQLHealthStatus, + PostgreSQLMetrics, + QueryResult, + TransactionCallback, + SchemaNames, + TableNames, + Trade, + Order, + Position, + Portfolio, + Strategy, + RiskLimit, + AuditLog +} from './types'; + +// Utils +export { createPostgreSQLClient, getPostgreSQLClient } from './factory'; diff --git a/libs/postgres-client/src/query-builder.ts b/libs/postgres-client/src/query-builder.ts index dee550f..c3ae656 100644 --- a/libs/postgres-client/src/query-builder.ts +++ b/libs/postgres-client/src/query-builder.ts @@ -1,268 +1,268 @@ -import type { QueryResultRow } from 'pg'; -import type { PostgreSQLClient } from './client'; -import type { WhereCondition, JoinCondition, OrderByCondition, QueryResult } from './types'; - -/** - * PostgreSQL Query Builder - * - * Provides a fluent interface for building SQL queries - */ -export class PostgreSQLQueryBuilder { - private queryType: 'SELECT' | 'INSERT' | 'UPDATE' | 'DELETE' | null = null; - private selectColumns: string[] = []; - private fromTable: string = ''; - private joins: JoinCondition[] = []; - private whereConditions: WhereCondition[] = []; - private groupByColumns: string[] = []; - private havingConditions: WhereCondition[] = []; - private orderByConditions: OrderByCondition[] = []; - private limitCount: number | null = null; - private offsetCount: number | null = null; - private insertValues: Record = {}; - private updateValues: Record = {}; - - private readonly client: PostgreSQLClient; - - constructor(client: PostgreSQLClient) { - this.client = client; - } - - /** - * SELECT statement - */ - select(columns: 
string | string[] = '*'): this { - this.queryType = 'SELECT'; - this.selectColumns = Array.isArray(columns) ? columns : [columns]; - return this; - } - - /** - * FROM clause - */ - from(table: string): this { - this.fromTable = table; - return this; - } - - /** - * JOIN clause - */ - join(table: string, on: string, type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL' = 'INNER'): this { - this.joins.push({ type, table, on }); - return this; - } - - /** - * WHERE clause - */ - where(column: string, operator: string, value?: any): this { - this.whereConditions.push({ column, operator: operator as any, value }); - return this; - } - - /** - * GROUP BY clause - */ - groupBy(columns: string | string[]): this { - this.groupByColumns = Array.isArray(columns) ? columns : [columns]; - return this; - } - - /** - * ORDER BY clause - */ - orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): this { - this.orderByConditions.push({ column, direction }); - return this; - } - - /** - * LIMIT clause - */ - limit(count: number): this { - this.limitCount = count; - return this; - } - - /** - * OFFSET clause - */ - offset(count: number): this { - this.offsetCount = count; - return this; - } - - /** - * INSERT statement - */ - insert(table: string): this { - this.queryType = 'INSERT'; - this.fromTable = table; - return this; - } - - /** - * VALUES for INSERT - */ - values(data: Record): this { - this.insertValues = data; - return this; - } - - /** - * UPDATE statement - */ - update(table: string): this { - this.queryType = 'UPDATE'; - this.fromTable = table; - return this; - } - - /** - * SET for UPDATE - */ - set(data: Record): this { - this.updateValues = data; - return this; - } - - /** - * DELETE statement - */ - delete(table: string): this { - this.queryType = 'DELETE'; - this.fromTable = table; - return this; - } - - /** - * Build and execute the query - */ - async execute(): Promise> { - const { sql, params } = this.build(); - return await this.client.query(sql, params); - } - - /** - 
* Build the SQL query - */ - build(): { sql: string; params: any[] } { - const params: any[] = []; - let sql = ''; - - switch (this.queryType) { - case 'SELECT': - sql = this.buildSelectQuery(params); - break; - case 'INSERT': - sql = this.buildInsertQuery(params); - break; - case 'UPDATE': - sql = this.buildUpdateQuery(params); - break; - case 'DELETE': - sql = this.buildDeleteQuery(params); - break; - default: - throw new Error('Query type not specified'); - } - - return { sql, params }; - } - - private buildSelectQuery(params: any[]): string { - let sql = `SELECT ${this.selectColumns.join(', ')}`; - - if (this.fromTable) { - sql += ` FROM ${this.fromTable}`; - } - - // Add JOINs - for (const join of this.joins) { - sql += ` ${join.type} JOIN ${join.table} ON ${join.on}`; - } - - // Add WHERE - if (this.whereConditions.length > 0) { - sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params); - } - - // Add GROUP BY - if (this.groupByColumns.length > 0) { - sql += ` GROUP BY ${this.groupByColumns.join(', ')}`; - } - - // Add HAVING - if (this.havingConditions.length > 0) { - sql += ' HAVING ' + this.buildWhereClause(this.havingConditions, params); - } - - // Add ORDER BY - if (this.orderByConditions.length > 0) { - const orderBy = this.orderByConditions - .map(order => `${order.column} ${order.direction}`) - .join(', '); - sql += ` ORDER BY ${orderBy}`; - } - - // Add LIMIT - if (this.limitCount !== null) { - sql += ` LIMIT $${params.length + 1}`; - params.push(this.limitCount); - } - - // Add OFFSET - if (this.offsetCount !== null) { - sql += ` OFFSET $${params.length + 1}`; - params.push(this.offsetCount); - } - - return sql; - } - - private buildInsertQuery(params: any[]): string { - const columns = Object.keys(this.insertValues); - const placeholders = columns.map((_, i) => `$${params.length + i + 1}`); - - params.push(...Object.values(this.insertValues)); - - return `INSERT INTO ${this.fromTable} (${columns.join(', ')}) VALUES 
(${placeholders.join(', ')})`; - } - - private buildUpdateQuery(params: any[]): string { - const sets = Object.keys(this.updateValues).map((key, i) => { - return `${key} = $${params.length + i + 1}`; - }); - - params.push(...Object.values(this.updateValues)); - - let sql = `UPDATE ${this.fromTable} SET ${sets.join(', ')}`; - - if (this.whereConditions.length > 0) { - sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params); - } - - return sql; - } - - private buildDeleteQuery(params: any[]): string { - let sql = `DELETE FROM ${this.fromTable}`; - - if (this.whereConditions.length > 0) { - sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params); - } - - return sql; - } - - private buildWhereClause(conditions: WhereCondition[], params: any[]): string { - return conditions.map(condition => { - if (condition.operator === 'IS NULL' || condition.operator === 'IS NOT NULL') { - return `${condition.column} ${condition.operator}`; - } else { - params.push(condition.value); - return `${condition.column} ${condition.operator} $${params.length}`; - } - }).join(' AND '); - } -} +import type { QueryResultRow } from 'pg'; +import type { PostgreSQLClient } from './client'; +import type { WhereCondition, JoinCondition, OrderByCondition, QueryResult } from './types'; + +/** + * PostgreSQL Query Builder + * + * Provides a fluent interface for building SQL queries + */ +export class PostgreSQLQueryBuilder { + private queryType: 'SELECT' | 'INSERT' | 'UPDATE' | 'DELETE' | null = null; + private selectColumns: string[] = []; + private fromTable: string = ''; + private joins: JoinCondition[] = []; + private whereConditions: WhereCondition[] = []; + private groupByColumns: string[] = []; + private havingConditions: WhereCondition[] = []; + private orderByConditions: OrderByCondition[] = []; + private limitCount: number | null = null; + private offsetCount: number | null = null; + private insertValues: Record = {}; + private updateValues: Record = {}; + + 
private readonly client: PostgreSQLClient; + + constructor(client: PostgreSQLClient) { + this.client = client; + } + + /** + * SELECT statement + */ + select(columns: string | string[] = '*'): this { + this.queryType = 'SELECT'; + this.selectColumns = Array.isArray(columns) ? columns : [columns]; + return this; + } + + /** + * FROM clause + */ + from(table: string): this { + this.fromTable = table; + return this; + } + + /** + * JOIN clause + */ + join(table: string, on: string, type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL' = 'INNER'): this { + this.joins.push({ type, table, on }); + return this; + } + + /** + * WHERE clause + */ + where(column: string, operator: string, value?: any): this { + this.whereConditions.push({ column, operator: operator as any, value }); + return this; + } + + /** + * GROUP BY clause + */ + groupBy(columns: string | string[]): this { + this.groupByColumns = Array.isArray(columns) ? columns : [columns]; + return this; + } + + /** + * ORDER BY clause + */ + orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): this { + this.orderByConditions.push({ column, direction }); + return this; + } + + /** + * LIMIT clause + */ + limit(count: number): this { + this.limitCount = count; + return this; + } + + /** + * OFFSET clause + */ + offset(count: number): this { + this.offsetCount = count; + return this; + } + + /** + * INSERT statement + */ + insert(table: string): this { + this.queryType = 'INSERT'; + this.fromTable = table; + return this; + } + + /** + * VALUES for INSERT + */ + values(data: Record): this { + this.insertValues = data; + return this; + } + + /** + * UPDATE statement + */ + update(table: string): this { + this.queryType = 'UPDATE'; + this.fromTable = table; + return this; + } + + /** + * SET for UPDATE + */ + set(data: Record): this { + this.updateValues = data; + return this; + } + + /** + * DELETE statement + */ + delete(table: string): this { + this.queryType = 'DELETE'; + this.fromTable = table; + return this; + } + + /** 
+ * Build and execute the query + */ + async execute(): Promise> { + const { sql, params } = this.build(); + return await this.client.query(sql, params); + } + + /** + * Build the SQL query + */ + build(): { sql: string; params: any[] } { + const params: any[] = []; + let sql = ''; + + switch (this.queryType) { + case 'SELECT': + sql = this.buildSelectQuery(params); + break; + case 'INSERT': + sql = this.buildInsertQuery(params); + break; + case 'UPDATE': + sql = this.buildUpdateQuery(params); + break; + case 'DELETE': + sql = this.buildDeleteQuery(params); + break; + default: + throw new Error('Query type not specified'); + } + + return { sql, params }; + } + + private buildSelectQuery(params: any[]): string { + let sql = `SELECT ${this.selectColumns.join(', ')}`; + + if (this.fromTable) { + sql += ` FROM ${this.fromTable}`; + } + + // Add JOINs + for (const join of this.joins) { + sql += ` ${join.type} JOIN ${join.table} ON ${join.on}`; + } + + // Add WHERE + if (this.whereConditions.length > 0) { + sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params); + } + + // Add GROUP BY + if (this.groupByColumns.length > 0) { + sql += ` GROUP BY ${this.groupByColumns.join(', ')}`; + } + + // Add HAVING + if (this.havingConditions.length > 0) { + sql += ' HAVING ' + this.buildWhereClause(this.havingConditions, params); + } + + // Add ORDER BY + if (this.orderByConditions.length > 0) { + const orderBy = this.orderByConditions + .map(order => `${order.column} ${order.direction}`) + .join(', '); + sql += ` ORDER BY ${orderBy}`; + } + + // Add LIMIT + if (this.limitCount !== null) { + sql += ` LIMIT $${params.length + 1}`; + params.push(this.limitCount); + } + + // Add OFFSET + if (this.offsetCount !== null) { + sql += ` OFFSET $${params.length + 1}`; + params.push(this.offsetCount); + } + + return sql; + } + + private buildInsertQuery(params: any[]): string { + const columns = Object.keys(this.insertValues); + const placeholders = columns.map((_, i) => 
`$${params.length + i + 1}`); + + params.push(...Object.values(this.insertValues)); + + return `INSERT INTO ${this.fromTable} (${columns.join(', ')}) VALUES (${placeholders.join(', ')})`; + } + + private buildUpdateQuery(params: any[]): string { + const sets = Object.keys(this.updateValues).map((key, i) => { + return `${key} = $${params.length + i + 1}`; + }); + + params.push(...Object.values(this.updateValues)); + + let sql = `UPDATE ${this.fromTable} SET ${sets.join(', ')}`; + + if (this.whereConditions.length > 0) { + sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params); + } + + return sql; + } + + private buildDeleteQuery(params: any[]): string { + let sql = `DELETE FROM ${this.fromTable}`; + + if (this.whereConditions.length > 0) { + sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params); + } + + return sql; + } + + private buildWhereClause(conditions: WhereCondition[], params: any[]): string { + return conditions.map(condition => { + if (condition.operator === 'IS NULL' || condition.operator === 'IS NOT NULL') { + return `${condition.column} ${condition.operator}`; + } else { + params.push(condition.value); + return `${condition.column} ${condition.operator} $${params.length}`; + } + }).join(' AND '); + } +} diff --git a/libs/postgres-client/src/transactions.ts b/libs/postgres-client/src/transactions.ts index ced7a80..1f33d8f 100644 --- a/libs/postgres-client/src/transactions.ts +++ b/libs/postgres-client/src/transactions.ts @@ -1,57 +1,57 @@ -import { PoolClient } from 'pg'; -import { getLogger } from '@stock-bot/logger'; -import type { PostgreSQLClient } from './client'; -import type { TransactionCallback } from './types'; - -/** - * PostgreSQL Transaction Manager - * - * Provides transaction support for multi-statement operations - */ -export class PostgreSQLTransactionManager { - private readonly client: PostgreSQLClient; - private readonly logger: ReturnType; - - constructor(client: PostgreSQLClient) { - this.client = 
client; - this.logger = getLogger('postgres-transaction-manager'); - } - - /** - * Execute operations within a transaction - */ - async execute(callback: TransactionCallback): Promise { - const pool = this.client.connectionPool; - if (!pool) { - throw new Error('PostgreSQL client not connected'); - } - - const client = await pool.connect(); - - try { - this.logger.debug('Starting PostgreSQL transaction'); - - await client.query('BEGIN'); - - const result = await callback(client); - - await client.query('COMMIT'); - - this.logger.debug('PostgreSQL transaction committed successfully'); - return result; - - } catch (error) { - this.logger.error('PostgreSQL transaction failed, rolling back:', error); - - try { - await client.query('ROLLBACK'); - } catch (rollbackError) { - this.logger.error('Failed to rollback transaction:', rollbackError); - } - - throw error; - } finally { - client.release(); - } - } -} +import { PoolClient } from 'pg'; +import { getLogger } from '@stock-bot/logger'; +import type { PostgreSQLClient } from './client'; +import type { TransactionCallback } from './types'; + +/** + * PostgreSQL Transaction Manager + * + * Provides transaction support for multi-statement operations + */ +export class PostgreSQLTransactionManager { + private readonly client: PostgreSQLClient; + private readonly logger: ReturnType; + + constructor(client: PostgreSQLClient) { + this.client = client; + this.logger = getLogger('postgres-transaction-manager'); + } + + /** + * Execute operations within a transaction + */ + async execute(callback: TransactionCallback): Promise { + const pool = this.client.connectionPool; + if (!pool) { + throw new Error('PostgreSQL client not connected'); + } + + const client = await pool.connect(); + + try { + this.logger.debug('Starting PostgreSQL transaction'); + + await client.query('BEGIN'); + + const result = await callback(client); + + await client.query('COMMIT'); + + this.logger.debug('PostgreSQL transaction committed successfully'); + 
return result; + + } catch (error) { + this.logger.error('PostgreSQL transaction failed, rolling back:', error); + + try { + await client.query('ROLLBACK'); + } catch (rollbackError) { + this.logger.error('Failed to rollback transaction:', rollbackError); + } + + throw error; + } finally { + client.release(); + } + } +} diff --git a/libs/postgres-client/src/types.ts b/libs/postgres-client/src/types.ts index 0c5a7ac..fb7d7a0 100644 --- a/libs/postgres-client/src/types.ts +++ b/libs/postgres-client/src/types.ts @@ -1,206 +1,206 @@ -import type { Pool, PoolClient, QueryResult as PgQueryResult, QueryResultRow } from 'pg'; - -/** - * PostgreSQL Client Configuration - */ -export interface PostgreSQLClientConfig { - host: string; - port: number; - database: string; - username: string; - password: string; - poolSettings?: { - min: number; - max: number; - idleTimeoutMillis: number; - }; - ssl?: { - enabled: boolean; - rejectUnauthorized: boolean; - }; - timeouts?: { - query: number; - connection: number; - statement: number; - lock: number; - idleInTransaction: number; - }; -} - -/** - * PostgreSQL Connection Options - */ -export interface PostgreSQLConnectionOptions { - retryAttempts?: number; - retryDelay?: number; - healthCheckInterval?: number; -} - -/** - * Health Status Types - */ -export type PostgreSQLHealthStatus = 'healthy' | 'degraded' | 'unhealthy'; - -export interface PostgreSQLHealthCheck { - status: PostgreSQLHealthStatus; - timestamp: Date; - latency: number; - connections: { - active: number; - idle: number; - total: number; - }; - errors?: string[]; -} - -export interface PostgreSQLMetrics { - queriesPerSecond: number; - averageQueryTime: number; - errorRate: number; - connectionPoolUtilization: number; - slowQueries: number; -} - -/** - * Query Result Types - */ -export interface QueryResult extends PgQueryResult { - executionTime?: number; -} - -export type TransactionCallback = (client: PoolClient) => Promise; - -/** - * Schema and Table Names - */ 
-export type SchemaNames = 'trading' | 'strategy' | 'risk' | 'audit'; - -export type TableNames = - | 'trades' - | 'orders' - | 'positions' - | 'portfolios' - | 'strategies' - | 'risk_limits' - | 'audit_logs' - | 'users' - | 'accounts' - | 'symbols' - | 'exchanges'; - -/** - * Trading Domain Types - */ -export interface Trade { - id: string; - order_id: string; - symbol: string; - side: 'buy' | 'sell'; - quantity: number; - price: number; - executed_at: Date; - commission: number; - fees: number; - portfolio_id: string; - strategy_id?: string; - created_at: Date; - updated_at: Date; -} - -export interface Order { - id: string; - symbol: string; - side: 'buy' | 'sell'; - type: 'market' | 'limit' | 'stop' | 'stop_limit'; - quantity: number; - price?: number; - stop_price?: number; - status: 'pending' | 'filled' | 'cancelled' | 'rejected'; - portfolio_id: string; - strategy_id?: string; - created_at: Date; - updated_at: Date; - expires_at?: Date; -} - -export interface Position { - id: string; - symbol: string; - quantity: number; - average_cost: number; - market_value: number; - unrealized_pnl: number; - realized_pnl: number; - portfolio_id: string; - created_at: Date; - updated_at: Date; -} - -export interface Portfolio { - id: string; - name: string; - cash_balance: number; - total_value: number; - unrealized_pnl: number; - realized_pnl: number; - user_id: string; - created_at: Date; - updated_at: Date; -} - -export interface Strategy { - id: string; - name: string; - description: string; - parameters: Record; - status: 'active' | 'inactive' | 'paused'; - performance_metrics: Record; - portfolio_id: string; - created_at: Date; - updated_at: Date; -} - -export interface RiskLimit { - id: string; - type: 'position_size' | 'daily_loss' | 'max_drawdown' | 'concentration'; - value: number; - threshold: number; - status: 'active' | 'breached' | 'disabled'; - portfolio_id?: string; - strategy_id?: string; - created_at: Date; - updated_at: Date; -} - -export interface 
AuditLog { - id: string; - action: string; - entity_type: string; - entity_id: string; - old_values?: Record; - new_values?: Record; - user_id?: string; - ip_address?: string; - user_agent?: string; - timestamp: Date; -} - -/** - * Query Builder Types - */ -export interface WhereCondition { - column: string; - operator: '=' | '!=' | '>' | '<' | '>=' | '<=' | 'IN' | 'NOT IN' | 'LIKE' | 'ILIKE' | 'IS NULL' | 'IS NOT NULL'; - value?: any; -} - -export interface JoinCondition { - type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL'; - table: string; - on: string; -} - -export interface OrderByCondition { - column: string; - direction: 'ASC' | 'DESC'; -} +import type { Pool, PoolClient, QueryResult as PgQueryResult, QueryResultRow } from 'pg'; + +/** + * PostgreSQL Client Configuration + */ +export interface PostgreSQLClientConfig { + host: string; + port: number; + database: string; + username: string; + password: string; + poolSettings?: { + min: number; + max: number; + idleTimeoutMillis: number; + }; + ssl?: { + enabled: boolean; + rejectUnauthorized: boolean; + }; + timeouts?: { + query: number; + connection: number; + statement: number; + lock: number; + idleInTransaction: number; + }; +} + +/** + * PostgreSQL Connection Options + */ +export interface PostgreSQLConnectionOptions { + retryAttempts?: number; + retryDelay?: number; + healthCheckInterval?: number; +} + +/** + * Health Status Types + */ +export type PostgreSQLHealthStatus = 'healthy' | 'degraded' | 'unhealthy'; + +export interface PostgreSQLHealthCheck { + status: PostgreSQLHealthStatus; + timestamp: Date; + latency: number; + connections: { + active: number; + idle: number; + total: number; + }; + errors?: string[]; +} + +export interface PostgreSQLMetrics { + queriesPerSecond: number; + averageQueryTime: number; + errorRate: number; + connectionPoolUtilization: number; + slowQueries: number; +} + +/** + * Query Result Types + */ +export interface QueryResult extends PgQueryResult { + executionTime?: number; +} 
+ +export type TransactionCallback = (client: PoolClient) => Promise; + +/** + * Schema and Table Names + */ +export type SchemaNames = 'trading' | 'strategy' | 'risk' | 'audit'; + +export type TableNames = + | 'trades' + | 'orders' + | 'positions' + | 'portfolios' + | 'strategies' + | 'risk_limits' + | 'audit_logs' + | 'users' + | 'accounts' + | 'symbols' + | 'exchanges'; + +/** + * Trading Domain Types + */ +export interface Trade { + id: string; + order_id: string; + symbol: string; + side: 'buy' | 'sell'; + quantity: number; + price: number; + executed_at: Date; + commission: number; + fees: number; + portfolio_id: string; + strategy_id?: string; + created_at: Date; + updated_at: Date; +} + +export interface Order { + id: string; + symbol: string; + side: 'buy' | 'sell'; + type: 'market' | 'limit' | 'stop' | 'stop_limit'; + quantity: number; + price?: number; + stop_price?: number; + status: 'pending' | 'filled' | 'cancelled' | 'rejected'; + portfolio_id: string; + strategy_id?: string; + created_at: Date; + updated_at: Date; + expires_at?: Date; +} + +export interface Position { + id: string; + symbol: string; + quantity: number; + average_cost: number; + market_value: number; + unrealized_pnl: number; + realized_pnl: number; + portfolio_id: string; + created_at: Date; + updated_at: Date; +} + +export interface Portfolio { + id: string; + name: string; + cash_balance: number; + total_value: number; + unrealized_pnl: number; + realized_pnl: number; + user_id: string; + created_at: Date; + updated_at: Date; +} + +export interface Strategy { + id: string; + name: string; + description: string; + parameters: Record; + status: 'active' | 'inactive' | 'paused'; + performance_metrics: Record; + portfolio_id: string; + created_at: Date; + updated_at: Date; +} + +export interface RiskLimit { + id: string; + type: 'position_size' | 'daily_loss' | 'max_drawdown' | 'concentration'; + value: number; + threshold: number; + status: 'active' | 'breached' | 'disabled'; + 
portfolio_id?: string; + strategy_id?: string; + created_at: Date; + updated_at: Date; +} + +export interface AuditLog { + id: string; + action: string; + entity_type: string; + entity_id: string; + old_values?: Record; + new_values?: Record; + user_id?: string; + ip_address?: string; + user_agent?: string; + timestamp: Date; +} + +/** + * Query Builder Types + */ +export interface WhereCondition { + column: string; + operator: '=' | '!=' | '>' | '<' | '>=' | '<=' | 'IN' | 'NOT IN' | 'LIKE' | 'ILIKE' | 'IS NULL' | 'IS NOT NULL'; + value?: any; +} + +export interface JoinCondition { + type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL'; + table: string; + on: string; +} + +export interface OrderByCondition { + column: string; + direction: 'ASC' | 'DESC'; +} diff --git a/libs/postgres-client/tsconfig.json b/libs/postgres-client/tsconfig.json index 3030b42..e8f78e0 100644 --- a/libs/postgres-client/tsconfig.json +++ b/libs/postgres-client/tsconfig.json @@ -1,13 +1,13 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "references": [ - { "path": "../types" }, - { "path": "../config" }, - { "path": "../logger" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../config" }, + { "path": "../logger" } + ] +} diff --git a/libs/postgres-client/turbo.json b/libs/postgres-client/turbo.json index 792d858..92c4460 100644 --- a/libs/postgres-client/turbo.json +++ b/libs/postgres-client/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build", "@stock-bot/logger#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": 
["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build", "@stock-bot/logger#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/questdb-client/README.md b/libs/questdb-client/README.md index ff8a7fd..e69884d 100644 --- a/libs/questdb-client/README.md +++ b/libs/questdb-client/README.md @@ -1,102 +1,102 @@ -# QuestDB Client Library - -A comprehensive QuestDB client library for the Stock Bot trading platform, optimized for time-series data, market analytics, and high-performance queries. - -## Features - -- **Time-Series Optimized**: Built specifically for time-series data patterns -- **Dual Protocol Support**: HTTP REST API and PostgreSQL wire protocol -- **InfluxDB Line Protocol**: High-performance data ingestion -- **SQL Analytics**: Full SQL support for complex analytics -- **Schema Management**: Automatic table creation and partitioning -- **Performance Monitoring**: Query performance tracking and optimization -- **Health Monitoring**: Connection health monitoring and metrics - -## Usage - -```typescript -import { QuestDBClient } from '@stock-bot/questdb-client'; - -// Initialize client -const questClient = new QuestDBClient(); -await questClient.connect(); - -// Insert market data using InfluxDB Line Protocol -await questClient.insert('ohlcv', { - symbol: 'AAPL', - open: 150.00, - high: 152.00, - low: 149.50, - close: 151.50, - volume: 1000000, - timestamp: new Date() -}); - -// Query with SQL -const prices = await questClient.query(` - SELECT symbol, close, timestamp - FROM ohlcv - WHERE symbol = 'AAPL' - AND timestamp > dateadd('d', -1, now()) - ORDER BY timestamp DESC -`); - -// Time-series aggregations -const dailyStats = await questClient.aggregate('ohlcv') - .select(['symbol', 'avg(close) as avg_price']) - .where('symbol = ?', ['AAPL']) - .groupBy('symbol') - 
.sampleBy('1d', 'timestamp') - .execute(); -``` - -## Data Types - -The client provides typed access to the following time-series data: - -- **ohlcv**: OHLCV candlestick data -- **trades**: Individual trade executions -- **quotes**: Bid/ask quote data -- **indicators**: Technical indicator values -- **performance**: Portfolio performance metrics -- **risk_metrics**: Risk calculation results - -## Configuration - -Configure using environment variables: - -```env -QUESTDB_HOST=localhost -QUESTDB_HTTP_PORT=9000 -QUESTDB_PG_PORT=8812 -QUESTDB_INFLUX_PORT=9009 -``` - -## Time-Series Features - -QuestDB excels at: - -- **High-frequency data**: Millions of data points per second -- **Time-based partitioning**: Automatic partitioning by time -- **ASOF JOINs**: Time-series specific joins -- **SAMPLE BY**: Time-based aggregations -- **LATEST BY**: Get latest values by key - -## Performance - -The client includes performance optimizations: - -- Connection pooling for HTTP and PostgreSQL protocols -- Batch insertions for high throughput -- Compressed data transfer -- Query result caching -- Automatic schema optimization - -## Health Monitoring - -Built-in health monitoring: - -```typescript -const health = await questClient.getHealth(); -console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy' -``` +# QuestDB Client Library + +A comprehensive QuestDB client library for the Stock Bot trading platform, optimized for time-series data, market analytics, and high-performance queries. 
+ +## Features + +- **Time-Series Optimized**: Built specifically for time-series data patterns +- **Dual Protocol Support**: HTTP REST API and PostgreSQL wire protocol +- **InfluxDB Line Protocol**: High-performance data ingestion +- **SQL Analytics**: Full SQL support for complex analytics +- **Schema Management**: Automatic table creation and partitioning +- **Performance Monitoring**: Query performance tracking and optimization +- **Health Monitoring**: Connection health monitoring and metrics + +## Usage + +```typescript +import { QuestDBClient } from '@stock-bot/questdb-client'; + +// Initialize client +const questClient = new QuestDBClient(); +await questClient.connect(); + +// Insert market data using InfluxDB Line Protocol +await questClient.insert('ohlcv', { + symbol: 'AAPL', + open: 150.00, + high: 152.00, + low: 149.50, + close: 151.50, + volume: 1000000, + timestamp: new Date() +}); + +// Query with SQL +const prices = await questClient.query(` + SELECT symbol, close, timestamp + FROM ohlcv + WHERE symbol = 'AAPL' + AND timestamp > dateadd('d', -1, now()) + ORDER BY timestamp DESC +`); + +// Time-series aggregations +const dailyStats = await questClient.aggregate('ohlcv') + .select(['symbol', 'avg(close) as avg_price']) + .where('symbol = ?', ['AAPL']) + .groupBy('symbol') + .sampleBy('1d', 'timestamp') + .execute(); +``` + +## Data Types + +The client provides typed access to the following time-series data: + +- **ohlcv**: OHLCV candlestick data +- **trades**: Individual trade executions +- **quotes**: Bid/ask quote data +- **indicators**: Technical indicator values +- **performance**: Portfolio performance metrics +- **risk_metrics**: Risk calculation results + +## Configuration + +Configure using environment variables: + +```env +QUESTDB_HOST=localhost +QUESTDB_HTTP_PORT=9000 +QUESTDB_PG_PORT=8812 +QUESTDB_INFLUX_PORT=9009 +``` + +## Time-Series Features + +QuestDB excels at: + +- **High-frequency data**: Millions of data points per second +- 
**Time-based partitioning**: Automatic partitioning by time +- **ASOF JOINs**: Time-series specific joins +- **SAMPLE BY**: Time-based aggregations +- **LATEST BY**: Get latest values by key + +## Performance + +The client includes performance optimizations: + +- Connection pooling for HTTP and PostgreSQL protocols +- Batch insertions for high throughput +- Compressed data transfer +- Query result caching +- Automatic schema optimization + +## Health Monitoring + +Built-in health monitoring: + +```typescript +const health = await questClient.getHealth(); +console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy' +``` diff --git a/libs/questdb-client/bunfig.toml b/libs/questdb-client/bunfig.toml index 0bd9480..60bf05a 100644 --- a/libs/questdb-client/bunfig.toml +++ b/libs/questdb-client/bunfig.toml @@ -1,14 +1,14 @@ -# QuestDB Client Library Bun Test Configuration - -[test] -# Configure path mapping for tests -preload = ["./test/setup.ts"] - -# Test configuration -timeout = 5000 - -# Enable TypeScript paths resolution -[bun] -paths = { - "@/*" = ["./src/*"] -} +# QuestDB Client Library Bun Test Configuration + +[test] +# Configure path mapping for tests +preload = ["./test/setup.ts"] + +# Test configuration +timeout = 5000 + +# Enable TypeScript paths resolution +[bun] +paths = { + "@/*" = ["./src/*"] +} diff --git a/libs/questdb-client/package.json b/libs/questdb-client/package.json index 8c5c3b6..62d9d0e 100644 --- a/libs/questdb-client/package.json +++ b/libs/questdb-client/package.json @@ -1,45 +1,45 @@ -{ - "name": "@stock-bot/questdb-client", - "version": "1.0.0", - "description": "QuestDB client library for Stock Bot platform", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "test": "bun test", - "lint": "eslint src/**/*.ts", - "type-check": "tsc --noEmit", - "clean": "rimraf dist" - }, - "dependencies": { - "@stock-bot/config": "*", - "@stock-bot/logger": "*", - "@stock-bot/types": 
"*" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "typescript": "^5.3.0", - "eslint": "^8.56.0", - "@typescript-eslint/eslint-plugin": "^6.19.0", - "@typescript-eslint/parser": "^6.19.0", - "bun-types": "^1.2.15" - }, - "keywords": [ - "questdb", - "database", - "client", - "stock-bot" - ], - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ] -} +{ + "name": "@stock-bot/questdb-client", + "version": "1.0.0", + "description": "QuestDB client library for Stock Bot platform", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsc", + "test": "bun test", + "lint": "eslint src/**/*.ts", + "type-check": "tsc --noEmit", + "clean": "rimraf dist" + }, + "dependencies": { + "@stock-bot/config": "*", + "@stock-bot/logger": "*", + "@stock-bot/types": "*" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "typescript": "^5.3.0", + "eslint": "^8.56.0", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": "^6.19.0", + "bun-types": "^1.2.15" + }, + "keywords": [ + "questdb", + "database", + "client", + "stock-bot" + ], + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist", + "README.md" + ] +} diff --git a/libs/questdb-client/src/client.ts b/libs/questdb-client/src/client.ts index 639a938..d7ce3c8 100644 --- a/libs/questdb-client/src/client.ts +++ b/libs/questdb-client/src/client.ts @@ -1,471 +1,471 @@ -import { Pool } from 'pg'; -import { questdbConfig } from '@stock-bot/config'; -import { getLogger } from '@stock-bot/logger'; -import type { - QuestDBClientConfig, - QuestDBConnectionOptions, - QueryResult, - InsertResult, - BaseTimeSeriesData, - TableNames -} from './types'; -import { QuestDBHealthMonitor } from './health'; -import { QuestDBQueryBuilder } from 
'./query-builder'; -import { QuestDBInfluxWriter } from './influx-writer'; -import { QuestDBSchemaManager } from './schema'; - -/** - * QuestDB Client for Stock Bot - * - * Provides high-performance time-series data access with support for - * multiple protocols (HTTP, PostgreSQL, InfluxDB Line Protocol). - */ -export class QuestDBClient { - private pgPool: Pool | null = null; - private readonly config: QuestDBClientConfig; - private readonly options: QuestDBConnectionOptions; - private readonly logger = getLogger('QuestDBClient'); - private readonly healthMonitor: QuestDBHealthMonitor; - private readonly influxWriter: QuestDBInfluxWriter; - private readonly schemaManager: QuestDBSchemaManager; - private isConnected = false; - - constructor( - config?: Partial, - options?: QuestDBConnectionOptions - ) { - this.config = this.buildConfig(config); - this.options = { - protocol: 'pg', - retryAttempts: 3, - retryDelay: 1000, - healthCheckInterval: 30000, - ...options - }; - - this.healthMonitor = new QuestDBHealthMonitor(this); - this.influxWriter = new QuestDBInfluxWriter(this); - this.schemaManager = new QuestDBSchemaManager(this); - } - - /** - * Connect to QuestDB - */ - async connect(): Promise { - if (this.isConnected) { - return; - } - - let lastError: Error | null = null; - - for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) { - try { - this.logger.info(`Connecting to QuestDB (attempt ${attempt}/${this.options.retryAttempts})...`); - - // Connect via PostgreSQL wire protocol - this.pgPool = new Pool(this.buildPgPoolConfig()); - - // Test the connection - const client = await this.pgPool.connect(); - await client.query('SELECT 1'); - client.release(); - - this.isConnected = true; - this.logger.info('Successfully connected to QuestDB'); - // Initialize schema - await this.schemaManager.initializeDatabase(); - - // Start health monitoring - this.healthMonitor.startMonitoring(); - - return; - } catch (error) { - lastError = error as Error; - 
this.logger.error(`QuestDB connection attempt ${attempt} failed:`, error); - - if (this.pgPool) { - await this.pgPool.end(); - this.pgPool = null; - } - - if (attempt < this.options.retryAttempts!) { - await this.delay(this.options.retryDelay! * attempt); - } - } - } - - throw new Error(`Failed to connect to QuestDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`); - } - - /** - * Disconnect from QuestDB - */ - async disconnect(): Promise { - if (!this.isConnected) { - return; - } try { - this.healthMonitor.stopMonitoring(); - - if (this.pgPool) { - await this.pgPool.end(); - this.pgPool = null; - } - - this.isConnected = false; - this.logger.info('Disconnected from QuestDB'); - } catch (error) { - this.logger.error('Error disconnecting from QuestDB:', error); - throw error; - } - } - - /** - * Execute a SQL query - */ - async query(sql: string, params?: any[]): Promise> { - if (!this.pgPool) { - throw new Error('QuestDB client not connected'); - } - - const startTime = Date.now(); - - try { - const result = await this.pgPool.query(sql, params); - const executionTime = Date.now() - startTime; - - this.logger.debug(`Query executed in ${executionTime}ms`, { - query: sql.substring(0, 100), - rowCount: result.rowCount - }); - - return { - rows: result.rows, - rowCount: result.rowCount || 0, - executionTime, metadata: { - columns: result.fields?.map((field: any) => ({ - name: field.name, - type: this.mapDataType(field.dataTypeID) - })) || [] - } - }; - } catch (error) { - const executionTime = Date.now() - startTime; - this.logger.error(`Query failed after ${executionTime}ms:`, { - error: (error as Error).message, - query: sql, - params - }); - throw error; - } - } - /** - * Write OHLCV data using InfluxDB Line Protocol - */ - async writeOHLCV( - symbol: string, - exchange: string, - data: Array<{ - timestamp: Date; - open: number; - high: number; - low: number; - close: number; - volume: number; - }> - ): Promise { - return await 
this.influxWriter.writeOHLCV(symbol, exchange, data); - } - - /** - * Write market analytics data - */ - async writeMarketAnalytics( - symbol: string, - exchange: string, - analytics: { - timestamp: Date; - rsi?: number; - macd?: number; - signal?: number; - histogram?: number; - bollinger_upper?: number; - bollinger_lower?: number; - volume_sma?: number; - } - ): Promise { - return await this.influxWriter.writeMarketAnalytics(symbol, exchange, analytics); - } - - /** - * Get a query builder instance - */ - queryBuilder(): QuestDBQueryBuilder { - return new QuestDBQueryBuilder(this); - } - /** - * Create a SELECT query builder - */ - select(...columns: string[]): QuestDBQueryBuilder { - return this.queryBuilder().select(...columns); - } - - /** - * Create an aggregation query builder - */ - aggregate(table: TableNames): QuestDBQueryBuilder { - return this.queryBuilder().from(table); - } - - /** - * Execute a time-series specific query with SAMPLE BY - */ - async sampleBy( - table: TableNames, - columns: string[], - interval: string, - timeColumn: string = 'timestamp', - where?: string, - params?: any[] - ): Promise> { - const columnsStr = columns.join(', '); - const whereClause = where ? `WHERE ${where}` : ''; - - const sql = ` - SELECT ${columnsStr} - FROM ${table} - ${whereClause} - SAMPLE BY ${interval} - ALIGN TO CALENDAR - `; - - return await this.query(sql, params); - } - - /** - * Get latest values by symbol using LATEST BY - */ - async latestBy( - table: TableNames, - columns: string | string[] = '*', - keyColumns: string | string[] = 'symbol' - ): Promise> { - const columnsStr = Array.isArray(columns) ? columns.join(', ') : columns; - const keyColumnsStr = Array.isArray(keyColumns) ? 
keyColumns.join(', ') : keyColumns; - - const sql = ` - SELECT ${columnsStr} - FROM ${table} - LATEST BY ${keyColumnsStr} - `; - - return await this.query(sql); - } - - /** - * Execute ASOF JOIN for time-series correlation - */ - async asofJoin( - leftTable: TableNames, - rightTable: TableNames, - joinCondition: string, - columns?: string[], - where?: string, - params?: any[] - ): Promise> { - const columnsStr = columns ? columns.join(', ') : '*'; - const whereClause = where ? `WHERE ${where}` : ''; - - const sql = ` - SELECT ${columnsStr} - FROM ${leftTable} - ASOF JOIN ${rightTable} ON ${joinCondition} - ${whereClause} - `; - - return await this.query(sql, params); - } - - /** - * Get database statistics - */ - async getStats(): Promise { - const result = await this.query(` - SELECT - table_name, - row_count, - partition_count, - size_bytes - FROM tables() - WHERE table_name NOT LIKE 'sys.%' - ORDER BY row_count DESC - `); - return result.rows; - } - - /** - * Get table information - */ - async getTableInfo(tableName: string): Promise { - const result = await this.query( - `SELECT * FROM table_columns WHERE table_name = ?`, - [tableName] - ); - return result.rows; - } - - /** - * Check if PostgreSQL pool is healthy - */ - isPgPoolHealthy(): boolean { - return this.pgPool !== null && !this.pgPool.ended; - } - - /** - * Get HTTP endpoint URL - */ - getHttpUrl(): string { - const protocol = this.config.tls?.enabled ? 'https' : 'http'; - return `${protocol}://${this.config.host}:${this.config.httpPort}`; - } - - /** - * Get InfluxDB endpoint URL - */ - getInfluxUrl(): string { - const protocol = this.config.tls?.enabled ? 
'https' : 'http'; - return `${protocol}://${this.config.host}:${this.config.influxPort}`; - } - - /** - * Get health monitor instance - */ - getHealthMonitor(): QuestDBHealthMonitor { - return this.healthMonitor; - } - - /** - * Get schema manager instance - */ - getSchemaManager(): QuestDBSchemaManager { - return this.schemaManager; - } - - /** - * Get InfluxDB writer instance - */ - getInfluxWriter(): QuestDBInfluxWriter { - return this.influxWriter; - } - - /** - * Optimize table by rebuilding partitions - */ - async optimizeTable(tableName: string): Promise { - await this.query(`VACUUM TABLE ${tableName}`); - this.logger.info(`Optimized table: ${tableName}`); - } - - /** - * Create a table with time-series optimizations - */ - async createTable( - tableName: string, - columns: string, - partitionBy: string = 'DAY', - timestampColumn: string = 'timestamp' - ): Promise { - const sql = ` - CREATE TABLE IF NOT EXISTS ${tableName} ( - ${columns} - ) TIMESTAMP(${timestampColumn}) PARTITION BY ${partitionBy} - `; - - await this.query(sql); - this.logger.info(`Created table: ${tableName}`); - } - - /** - * Check if client is connected - */ - get connected(): boolean { - return this.isConnected && !!this.pgPool; - } - - /** - * Get the PostgreSQL connection pool - */ - get connectionPool(): Pool | null { - return this.pgPool; - } - - /** - * Get configuration - */ - get configuration(): QuestDBClientConfig { - return { ...this.config }; - } - - private buildConfig(config?: Partial): QuestDBClientConfig { - return { - host: config?.host || questdbConfig.QUESTDB_HOST, - httpPort: config?.httpPort || questdbConfig.QUESTDB_HTTP_PORT, - pgPort: config?.pgPort || questdbConfig.QUESTDB_PG_PORT, - influxPort: config?.influxPort || questdbConfig.QUESTDB_INFLUX_PORT, - user: config?.user || questdbConfig.QUESTDB_USER, - password: config?.password || questdbConfig.QUESTDB_PASSWORD, - database: config?.database || questdbConfig.QUESTDB_DEFAULT_DATABASE, - tls: { - enabled: 
questdbConfig.QUESTDB_TLS_ENABLED, - verifyServerCert: questdbConfig.QUESTDB_TLS_VERIFY_SERVER_CERT, - ...config?.tls - }, - timeouts: { - connection: questdbConfig.QUESTDB_CONNECTION_TIMEOUT, - request: questdbConfig.QUESTDB_REQUEST_TIMEOUT, - ...config?.timeouts - }, - retryAttempts: questdbConfig.QUESTDB_RETRY_ATTEMPTS, - ...config - }; - } - - private buildPgPoolConfig(): any { - return { - host: this.config.host, - port: this.config.pgPort, - database: this.config.database, - user: this.config.user, - password: this.config.password, - connectionTimeoutMillis: this.config.timeouts?.connection, - query_timeout: this.config.timeouts?.request, - ssl: this.config.tls?.enabled ? { - rejectUnauthorized: this.config.tls.verifyServerCert - } : false, - min: 2, - max: 10 - }; - } - - private mapDataType(typeId: number): string { - // Map PostgreSQL type IDs to QuestDB types - const typeMap: Record = { - 16: 'BOOLEAN', - 20: 'LONG', - 21: 'INT', - 23: 'INT', - 25: 'STRING', - 700: 'FLOAT', - 701: 'DOUBLE', - 1043: 'STRING', - 1082: 'DATE', - 1114: 'TIMESTAMP', - 1184: 'TIMESTAMP' - }; - - return typeMap[typeId] || 'STRING'; - } - - private delay(ms: number): Promise { - return new Promise(resolve => setTimeout(resolve, ms)); - } -} +import { Pool } from 'pg'; +import { questdbConfig } from '@stock-bot/config'; +import { getLogger } from '@stock-bot/logger'; +import type { + QuestDBClientConfig, + QuestDBConnectionOptions, + QueryResult, + InsertResult, + BaseTimeSeriesData, + TableNames +} from './types'; +import { QuestDBHealthMonitor } from './health'; +import { QuestDBQueryBuilder } from './query-builder'; +import { QuestDBInfluxWriter } from './influx-writer'; +import { QuestDBSchemaManager } from './schema'; + +/** + * QuestDB Client for Stock Bot + * + * Provides high-performance time-series data access with support for + * multiple protocols (HTTP, PostgreSQL, InfluxDB Line Protocol). 
+ */ +export class QuestDBClient { + private pgPool: Pool | null = null; + private readonly config: QuestDBClientConfig; + private readonly options: QuestDBConnectionOptions; + private readonly logger = getLogger('QuestDBClient'); + private readonly healthMonitor: QuestDBHealthMonitor; + private readonly influxWriter: QuestDBInfluxWriter; + private readonly schemaManager: QuestDBSchemaManager; + private isConnected = false; + + constructor( + config?: Partial, + options?: QuestDBConnectionOptions + ) { + this.config = this.buildConfig(config); + this.options = { + protocol: 'pg', + retryAttempts: 3, + retryDelay: 1000, + healthCheckInterval: 30000, + ...options + }; + + this.healthMonitor = new QuestDBHealthMonitor(this); + this.influxWriter = new QuestDBInfluxWriter(this); + this.schemaManager = new QuestDBSchemaManager(this); + } + + /** + * Connect to QuestDB + */ + async connect(): Promise { + if (this.isConnected) { + return; + } + + let lastError: Error | null = null; + + for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) { + try { + this.logger.info(`Connecting to QuestDB (attempt ${attempt}/${this.options.retryAttempts})...`); + + // Connect via PostgreSQL wire protocol + this.pgPool = new Pool(this.buildPgPoolConfig()); + + // Test the connection + const client = await this.pgPool.connect(); + await client.query('SELECT 1'); + client.release(); + + this.isConnected = true; + this.logger.info('Successfully connected to QuestDB'); + // Initialize schema + await this.schemaManager.initializeDatabase(); + + // Start health monitoring + this.healthMonitor.startMonitoring(); + + return; + } catch (error) { + lastError = error as Error; + this.logger.error(`QuestDB connection attempt ${attempt} failed:`, error); + + if (this.pgPool) { + await this.pgPool.end(); + this.pgPool = null; + } + + if (attempt < this.options.retryAttempts!) { + await this.delay(this.options.retryDelay! 
* attempt); + } + } + } + + throw new Error(`Failed to connect to QuestDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`); + } + + /** + * Disconnect from QuestDB + */ + async disconnect(): Promise { + if (!this.isConnected) { + return; + } try { + this.healthMonitor.stopMonitoring(); + + if (this.pgPool) { + await this.pgPool.end(); + this.pgPool = null; + } + + this.isConnected = false; + this.logger.info('Disconnected from QuestDB'); + } catch (error) { + this.logger.error('Error disconnecting from QuestDB:', error); + throw error; + } + } + + /** + * Execute a SQL query + */ + async query(sql: string, params?: any[]): Promise> { + if (!this.pgPool) { + throw new Error('QuestDB client not connected'); + } + + const startTime = Date.now(); + + try { + const result = await this.pgPool.query(sql, params); + const executionTime = Date.now() - startTime; + + this.logger.debug(`Query executed in ${executionTime}ms`, { + query: sql.substring(0, 100), + rowCount: result.rowCount + }); + + return { + rows: result.rows, + rowCount: result.rowCount || 0, + executionTime, metadata: { + columns: result.fields?.map((field: any) => ({ + name: field.name, + type: this.mapDataType(field.dataTypeID) + })) || [] + } + }; + } catch (error) { + const executionTime = Date.now() - startTime; + this.logger.error(`Query failed after ${executionTime}ms:`, { + error: (error as Error).message, + query: sql, + params + }); + throw error; + } + } + /** + * Write OHLCV data using InfluxDB Line Protocol + */ + async writeOHLCV( + symbol: string, + exchange: string, + data: Array<{ + timestamp: Date; + open: number; + high: number; + low: number; + close: number; + volume: number; + }> + ): Promise { + return await this.influxWriter.writeOHLCV(symbol, exchange, data); + } + + /** + * Write market analytics data + */ + async writeMarketAnalytics( + symbol: string, + exchange: string, + analytics: { + timestamp: Date; + rsi?: number; + macd?: number; + signal?: number; + 
histogram?: number; + bollinger_upper?: number; + bollinger_lower?: number; + volume_sma?: number; + } + ): Promise { + return await this.influxWriter.writeMarketAnalytics(symbol, exchange, analytics); + } + + /** + * Get a query builder instance + */ + queryBuilder(): QuestDBQueryBuilder { + return new QuestDBQueryBuilder(this); + } + /** + * Create a SELECT query builder + */ + select(...columns: string[]): QuestDBQueryBuilder { + return this.queryBuilder().select(...columns); + } + + /** + * Create an aggregation query builder + */ + aggregate(table: TableNames): QuestDBQueryBuilder { + return this.queryBuilder().from(table); + } + + /** + * Execute a time-series specific query with SAMPLE BY + */ + async sampleBy( + table: TableNames, + columns: string[], + interval: string, + timeColumn: string = 'timestamp', + where?: string, + params?: any[] + ): Promise> { + const columnsStr = columns.join(', '); + const whereClause = where ? `WHERE ${where}` : ''; + + const sql = ` + SELECT ${columnsStr} + FROM ${table} + ${whereClause} + SAMPLE BY ${interval} + ALIGN TO CALENDAR + `; + + return await this.query(sql, params); + } + + /** + * Get latest values by symbol using LATEST BY + */ + async latestBy( + table: TableNames, + columns: string | string[] = '*', + keyColumns: string | string[] = 'symbol' + ): Promise> { + const columnsStr = Array.isArray(columns) ? columns.join(', ') : columns; + const keyColumnsStr = Array.isArray(keyColumns) ? keyColumns.join(', ') : keyColumns; + + const sql = ` + SELECT ${columnsStr} + FROM ${table} + LATEST BY ${keyColumnsStr} + `; + + return await this.query(sql); + } + + /** + * Execute ASOF JOIN for time-series correlation + */ + async asofJoin( + leftTable: TableNames, + rightTable: TableNames, + joinCondition: string, + columns?: string[], + where?: string, + params?: any[] + ): Promise> { + const columnsStr = columns ? columns.join(', ') : '*'; + const whereClause = where ? 
`WHERE ${where}` : ''; + + const sql = ` + SELECT ${columnsStr} + FROM ${leftTable} + ASOF JOIN ${rightTable} ON ${joinCondition} + ${whereClause} + `; + + return await this.query(sql, params); + } + + /** + * Get database statistics + */ + async getStats(): Promise { + const result = await this.query(` + SELECT + table_name, + row_count, + partition_count, + size_bytes + FROM tables() + WHERE table_name NOT LIKE 'sys.%' + ORDER BY row_count DESC + `); + return result.rows; + } + + /** + * Get table information + */ + async getTableInfo(tableName: string): Promise { + const result = await this.query( + `SELECT * FROM table_columns WHERE table_name = ?`, + [tableName] + ); + return result.rows; + } + + /** + * Check if PostgreSQL pool is healthy + */ + isPgPoolHealthy(): boolean { + return this.pgPool !== null && !this.pgPool.ended; + } + + /** + * Get HTTP endpoint URL + */ + getHttpUrl(): string { + const protocol = this.config.tls?.enabled ? 'https' : 'http'; + return `${protocol}://${this.config.host}:${this.config.httpPort}`; + } + + /** + * Get InfluxDB endpoint URL + */ + getInfluxUrl(): string { + const protocol = this.config.tls?.enabled ? 
'https' : 'http'; + return `${protocol}://${this.config.host}:${this.config.influxPort}`; + } + + /** + * Get health monitor instance + */ + getHealthMonitor(): QuestDBHealthMonitor { + return this.healthMonitor; + } + + /** + * Get schema manager instance + */ + getSchemaManager(): QuestDBSchemaManager { + return this.schemaManager; + } + + /** + * Get InfluxDB writer instance + */ + getInfluxWriter(): QuestDBInfluxWriter { + return this.influxWriter; + } + + /** + * Optimize table by rebuilding partitions + */ + async optimizeTable(tableName: string): Promise { + await this.query(`VACUUM TABLE ${tableName}`); + this.logger.info(`Optimized table: ${tableName}`); + } + + /** + * Create a table with time-series optimizations + */ + async createTable( + tableName: string, + columns: string, + partitionBy: string = 'DAY', + timestampColumn: string = 'timestamp' + ): Promise { + const sql = ` + CREATE TABLE IF NOT EXISTS ${tableName} ( + ${columns} + ) TIMESTAMP(${timestampColumn}) PARTITION BY ${partitionBy} + `; + + await this.query(sql); + this.logger.info(`Created table: ${tableName}`); + } + + /** + * Check if client is connected + */ + get connected(): boolean { + return this.isConnected && !!this.pgPool; + } + + /** + * Get the PostgreSQL connection pool + */ + get connectionPool(): Pool | null { + return this.pgPool; + } + + /** + * Get configuration + */ + get configuration(): QuestDBClientConfig { + return { ...this.config }; + } + + private buildConfig(config?: Partial): QuestDBClientConfig { + return { + host: config?.host || questdbConfig.QUESTDB_HOST, + httpPort: config?.httpPort || questdbConfig.QUESTDB_HTTP_PORT, + pgPort: config?.pgPort || questdbConfig.QUESTDB_PG_PORT, + influxPort: config?.influxPort || questdbConfig.QUESTDB_INFLUX_PORT, + user: config?.user || questdbConfig.QUESTDB_USER, + password: config?.password || questdbConfig.QUESTDB_PASSWORD, + database: config?.database || questdbConfig.QUESTDB_DEFAULT_DATABASE, + tls: { + enabled: 
questdbConfig.QUESTDB_TLS_ENABLED, + verifyServerCert: questdbConfig.QUESTDB_TLS_VERIFY_SERVER_CERT, + ...config?.tls + }, + timeouts: { + connection: questdbConfig.QUESTDB_CONNECTION_TIMEOUT, + request: questdbConfig.QUESTDB_REQUEST_TIMEOUT, + ...config?.timeouts + }, + retryAttempts: questdbConfig.QUESTDB_RETRY_ATTEMPTS, + ...config + }; + } + + private buildPgPoolConfig(): any { + return { + host: this.config.host, + port: this.config.pgPort, + database: this.config.database, + user: this.config.user, + password: this.config.password, + connectionTimeoutMillis: this.config.timeouts?.connection, + query_timeout: this.config.timeouts?.request, + ssl: this.config.tls?.enabled ? { + rejectUnauthorized: this.config.tls.verifyServerCert + } : false, + min: 2, + max: 10 + }; + } + + private mapDataType(typeId: number): string { + // Map PostgreSQL type IDs to QuestDB types + const typeMap: Record = { + 16: 'BOOLEAN', + 20: 'LONG', + 21: 'INT', + 23: 'INT', + 25: 'STRING', + 700: 'FLOAT', + 701: 'DOUBLE', + 1043: 'STRING', + 1082: 'DATE', + 1114: 'TIMESTAMP', + 1184: 'TIMESTAMP' + }; + + return typeMap[typeId] || 'STRING'; + } + + private delay(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)); + } +} diff --git a/libs/questdb-client/src/factory.ts b/libs/questdb-client/src/factory.ts index 0f626fb..54a1e97 100644 --- a/libs/questdb-client/src/factory.ts +++ b/libs/questdb-client/src/factory.ts @@ -1,63 +1,63 @@ -import { QuestDBClient } from './client'; -import { questdbConfig } from '@stock-bot/config'; -import type { QuestDBClientConfig, QuestDBConnectionOptions } from './types'; - -/** - * Factory function to create a QuestDB client instance - */ -export function createQuestDBClient( - config?: Partial, - options?: QuestDBConnectionOptions -): QuestDBClient { - return new QuestDBClient(config, options); -} - -/** - * Create a QuestDB client with default configuration - */ -export function createDefaultQuestDBClient(): QuestDBClient { - 
const config: Partial = { - host: questdbConfig.QUESTDB_HOST, - httpPort: questdbConfig.QUESTDB_HTTP_PORT, - pgPort: questdbConfig.QUESTDB_PG_PORT, - influxPort: questdbConfig.QUESTDB_INFLUX_PORT, - user: questdbConfig.QUESTDB_USER, - password: questdbConfig.QUESTDB_PASSWORD - }; - - return new QuestDBClient(config); -} - -/** - * Singleton QuestDB client instance - */ -let defaultClient: QuestDBClient | null = null; - -/** - * Get or create the default QuestDB client instance - */ -export function getQuestDBClient(): QuestDBClient { - if (!defaultClient) { - defaultClient = createDefaultQuestDBClient(); - } - return defaultClient; -} - -/** - * Connect to QuestDB using the default client - */ -export async function connectQuestDB(): Promise { - const client = getQuestDBClient(); - await client.connect(); - return client; -} - -/** - * Disconnect from QuestDB - */ -export async function disconnectQuestDB(): Promise { - if (defaultClient) { - await defaultClient.disconnect(); - defaultClient = null; - } -} +import { QuestDBClient } from './client'; +import { questdbConfig } from '@stock-bot/config'; +import type { QuestDBClientConfig, QuestDBConnectionOptions } from './types'; + +/** + * Factory function to create a QuestDB client instance + */ +export function createQuestDBClient( + config?: Partial, + options?: QuestDBConnectionOptions +): QuestDBClient { + return new QuestDBClient(config, options); +} + +/** + * Create a QuestDB client with default configuration + */ +export function createDefaultQuestDBClient(): QuestDBClient { + const config: Partial = { + host: questdbConfig.QUESTDB_HOST, + httpPort: questdbConfig.QUESTDB_HTTP_PORT, + pgPort: questdbConfig.QUESTDB_PG_PORT, + influxPort: questdbConfig.QUESTDB_INFLUX_PORT, + user: questdbConfig.QUESTDB_USER, + password: questdbConfig.QUESTDB_PASSWORD + }; + + return new QuestDBClient(config); +} + +/** + * Singleton QuestDB client instance + */ +let defaultClient: QuestDBClient | null = null; + +/** + * Get or 
create the default QuestDB client instance + */ +export function getQuestDBClient(): QuestDBClient { + if (!defaultClient) { + defaultClient = createDefaultQuestDBClient(); + } + return defaultClient; +} + +/** + * Connect to QuestDB using the default client + */ +export async function connectQuestDB(): Promise { + const client = getQuestDBClient(); + await client.connect(); + return client; +} + +/** + * Disconnect from QuestDB + */ +export async function disconnectQuestDB(): Promise { + if (defaultClient) { + await defaultClient.disconnect(); + defaultClient = null; + } +} diff --git a/libs/questdb-client/src/health.ts b/libs/questdb-client/src/health.ts index 0a5e1fb..29c008f 100644 --- a/libs/questdb-client/src/health.ts +++ b/libs/questdb-client/src/health.ts @@ -1,233 +1,233 @@ -import { getLogger } from '@stock-bot/logger'; -import type { HealthStatus, PerformanceMetrics, QueryResult } from './types'; - -// Interface to avoid circular dependency -interface QuestDBClientInterface { - query(sql: string, params?: any[]): Promise>; - isPgPoolHealthy(): boolean; -} - -/** - * QuestDB Health Monitor - * - * Monitors connection health, performance metrics, and provides - * automatic recovery capabilities for the QuestDB client. 
- */ -export class QuestDBHealthMonitor { - private readonly logger: ReturnType; - private healthCheckInterval: NodeJS.Timeout | null = null; - private lastHealthCheck: Date | null = null; - private performanceMetrics: PerformanceMetrics = { - totalQueries: 0, - successfulQueries: 0, - failedQueries: 0, - averageResponseTime: 0, - lastQueryTime: null, - connectionUptime: 0, - memoryUsage: 0 - }; - constructor(private readonly client: QuestDBClientInterface) { - this.logger = getLogger('questdb-health-monitor'); - } - - /** - * Start health monitoring - */ - public startMonitoring(intervalMs: number = 30000): void { - if (this.healthCheckInterval) { - this.stopMonitoring(); - } - - this.logger.info(`Starting health monitoring with ${intervalMs}ms interval`); - - this.healthCheckInterval = setInterval(async () => { - try { - await this.performHealthCheck(); - } catch (error) { - this.logger.error('Health check failed', error); - } - }, intervalMs); - - // Perform initial health check - this.performHealthCheck().catch(error => { - this.logger.error('Initial health check failed', error); - }); - } - - /** - * Stop health monitoring - */ - public stopMonitoring(): void { - if (this.healthCheckInterval) { - clearInterval(this.healthCheckInterval); - this.healthCheckInterval = null; - this.logger.info('Health monitoring stopped'); - } - } - - /** - * Perform a health check - */ - public async performHealthCheck(): Promise { - const startTime = Date.now(); - - try { - // Test basic connectivity with a simple query - await this.client.query('SELECT 1 as health_check'); - - const responseTime = Date.now() - startTime; - this.lastHealthCheck = new Date(); - - const status: HealthStatus = { - isHealthy: true, - lastCheck: this.lastHealthCheck, - responseTime, - message: 'Connection healthy', - details: { - pgPool: this.client.isPgPoolHealthy(), - httpEndpoint: true, // Will be implemented when HTTP client is added - uptime: this.getUptime() - } - }; - - 
this.logger.debug('Health check passed', { responseTime }); - return status; - - } catch (error) { - const responseTime = Date.now() - startTime; - this.lastHealthCheck = new Date(); - - const status: HealthStatus = { - isHealthy: false, - lastCheck: this.lastHealthCheck, - responseTime, - message: `Health check failed: ${error instanceof Error ? error.message : 'Unknown error'}`, - error: error instanceof Error ? error : new Error('Unknown error'), - details: { - pgPool: false, - httpEndpoint: false, - uptime: this.getUptime() - } - }; - - this.logger.error('Health check failed', { error, responseTime }); - return status; - } - } - - /** - * Get current health status - */ - public async getHealthStatus(): Promise { - if (!this.lastHealthCheck || Date.now() - this.lastHealthCheck.getTime() > 60000) { - return await this.performHealthCheck(); - } - - // Return cached status if recent - return { - isHealthy: true, - lastCheck: this.lastHealthCheck, - responseTime: 0, - message: 'Using cached health status', - details: { - pgPool: this.client.isPgPoolHealthy(), - httpEndpoint: true, - uptime: this.getUptime() - } - }; - } - - /** - * Record query performance metrics - */ - public recordQuery(success: boolean, responseTime: number): void { - this.performanceMetrics.totalQueries++; - this.performanceMetrics.lastQueryTime = new Date(); - - if (success) { - this.performanceMetrics.successfulQueries++; - } else { - this.performanceMetrics.failedQueries++; - } - - // Update rolling average response time - const totalResponseTime = this.performanceMetrics.averageResponseTime * - (this.performanceMetrics.totalQueries - 1) + responseTime; - this.performanceMetrics.averageResponseTime = - totalResponseTime / this.performanceMetrics.totalQueries; - - // Update memory usage - this.performanceMetrics.memoryUsage = process.memoryUsage().heapUsed; - } - - /** - * Get performance metrics - */ - public getPerformanceMetrics(): PerformanceMetrics { - return { ...this.performanceMetrics 
}; - } - - /** - * Get connection uptime in seconds - */ - private getUptime(): number { - return Math.floor(process.uptime()); - } - - /** - * Reset performance metrics - */ - public resetMetrics(): void { - this.performanceMetrics = { - totalQueries: 0, - successfulQueries: 0, - failedQueries: 0, - averageResponseTime: 0, - lastQueryTime: null, - connectionUptime: this.getUptime(), - memoryUsage: process.memoryUsage().heapUsed - }; - - this.logger.info('Performance metrics reset'); - } - - /** - * Get health summary for monitoring dashboards - */ - public async getHealthSummary(): Promise<{ - status: HealthStatus; - metrics: PerformanceMetrics; - recommendations: string[]; - }> { - const status = await this.getHealthStatus(); - const metrics = this.getPerformanceMetrics(); - const recommendations: string[] = []; - - // Generate recommendations based on metrics - if (metrics.failedQueries > metrics.successfulQueries * 0.1) { - recommendations.push('High error rate detected - check query patterns'); - } - - if (metrics.averageResponseTime > 1000) { - recommendations.push('High response times - consider query optimization'); - } - - if (metrics.memoryUsage > 100 * 1024 * 1024) { // 100MB - recommendations.push('High memory usage - monitor for memory leaks'); - } - - return { - status, - metrics, - recommendations - }; - } - - /** - * Cleanup resources - */ - public destroy(): void { - this.stopMonitoring(); - this.logger.info('Health monitor destroyed'); - } -} +import { getLogger } from '@stock-bot/logger'; +import type { HealthStatus, PerformanceMetrics, QueryResult } from './types'; + +// Interface to avoid circular dependency +interface QuestDBClientInterface { + query(sql: string, params?: any[]): Promise>; + isPgPoolHealthy(): boolean; +} + +/** + * QuestDB Health Monitor + * + * Monitors connection health, performance metrics, and provides + * automatic recovery capabilities for the QuestDB client. 
+ */ +export class QuestDBHealthMonitor { + private readonly logger: ReturnType; + private healthCheckInterval: NodeJS.Timeout | null = null; + private lastHealthCheck: Date | null = null; + private performanceMetrics: PerformanceMetrics = { + totalQueries: 0, + successfulQueries: 0, + failedQueries: 0, + averageResponseTime: 0, + lastQueryTime: null, + connectionUptime: 0, + memoryUsage: 0 + }; + constructor(private readonly client: QuestDBClientInterface) { + this.logger = getLogger('questdb-health-monitor'); + } + + /** + * Start health monitoring + */ + public startMonitoring(intervalMs: number = 30000): void { + if (this.healthCheckInterval) { + this.stopMonitoring(); + } + + this.logger.info(`Starting health monitoring with ${intervalMs}ms interval`); + + this.healthCheckInterval = setInterval(async () => { + try { + await this.performHealthCheck(); + } catch (error) { + this.logger.error('Health check failed', error); + } + }, intervalMs); + + // Perform initial health check + this.performHealthCheck().catch(error => { + this.logger.error('Initial health check failed', error); + }); + } + + /** + * Stop health monitoring + */ + public stopMonitoring(): void { + if (this.healthCheckInterval) { + clearInterval(this.healthCheckInterval); + this.healthCheckInterval = null; + this.logger.info('Health monitoring stopped'); + } + } + + /** + * Perform a health check + */ + public async performHealthCheck(): Promise { + const startTime = Date.now(); + + try { + // Test basic connectivity with a simple query + await this.client.query('SELECT 1 as health_check'); + + const responseTime = Date.now() - startTime; + this.lastHealthCheck = new Date(); + + const status: HealthStatus = { + isHealthy: true, + lastCheck: this.lastHealthCheck, + responseTime, + message: 'Connection healthy', + details: { + pgPool: this.client.isPgPoolHealthy(), + httpEndpoint: true, // Will be implemented when HTTP client is added + uptime: this.getUptime() + } + }; + + 
this.logger.debug('Health check passed', { responseTime }); + return status; + + } catch (error) { + const responseTime = Date.now() - startTime; + this.lastHealthCheck = new Date(); + + const status: HealthStatus = { + isHealthy: false, + lastCheck: this.lastHealthCheck, + responseTime, + message: `Health check failed: ${error instanceof Error ? error.message : 'Unknown error'}`, + error: error instanceof Error ? error : new Error('Unknown error'), + details: { + pgPool: false, + httpEndpoint: false, + uptime: this.getUptime() + } + }; + + this.logger.error('Health check failed', { error, responseTime }); + return status; + } + } + + /** + * Get current health status + */ + public async getHealthStatus(): Promise { + if (!this.lastHealthCheck || Date.now() - this.lastHealthCheck.getTime() > 60000) { + return await this.performHealthCheck(); + } + + // Return cached status if recent + return { + isHealthy: true, + lastCheck: this.lastHealthCheck, + responseTime: 0, + message: 'Using cached health status', + details: { + pgPool: this.client.isPgPoolHealthy(), + httpEndpoint: true, + uptime: this.getUptime() + } + }; + } + + /** + * Record query performance metrics + */ + public recordQuery(success: boolean, responseTime: number): void { + this.performanceMetrics.totalQueries++; + this.performanceMetrics.lastQueryTime = new Date(); + + if (success) { + this.performanceMetrics.successfulQueries++; + } else { + this.performanceMetrics.failedQueries++; + } + + // Update rolling average response time + const totalResponseTime = this.performanceMetrics.averageResponseTime * + (this.performanceMetrics.totalQueries - 1) + responseTime; + this.performanceMetrics.averageResponseTime = + totalResponseTime / this.performanceMetrics.totalQueries; + + // Update memory usage + this.performanceMetrics.memoryUsage = process.memoryUsage().heapUsed; + } + + /** + * Get performance metrics + */ + public getPerformanceMetrics(): PerformanceMetrics { + return { ...this.performanceMetrics 
}; + } + + /** + * Get connection uptime in seconds + */ + private getUptime(): number { + return Math.floor(process.uptime()); + } + + /** + * Reset performance metrics + */ + public resetMetrics(): void { + this.performanceMetrics = { + totalQueries: 0, + successfulQueries: 0, + failedQueries: 0, + averageResponseTime: 0, + lastQueryTime: null, + connectionUptime: this.getUptime(), + memoryUsage: process.memoryUsage().heapUsed + }; + + this.logger.info('Performance metrics reset'); + } + + /** + * Get health summary for monitoring dashboards + */ + public async getHealthSummary(): Promise<{ + status: HealthStatus; + metrics: PerformanceMetrics; + recommendations: string[]; + }> { + const status = await this.getHealthStatus(); + const metrics = this.getPerformanceMetrics(); + const recommendations: string[] = []; + + // Generate recommendations based on metrics + if (metrics.failedQueries > metrics.successfulQueries * 0.1) { + recommendations.push('High error rate detected - check query patterns'); + } + + if (metrics.averageResponseTime > 1000) { + recommendations.push('High response times - consider query optimization'); + } + + if (metrics.memoryUsage > 100 * 1024 * 1024) { // 100MB + recommendations.push('High memory usage - monitor for memory leaks'); + } + + return { + status, + metrics, + recommendations + }; + } + + /** + * Cleanup resources + */ + public destroy(): void { + this.stopMonitoring(); + this.logger.info('Health monitor destroyed'); + } +} diff --git a/libs/questdb-client/src/index.ts b/libs/questdb-client/src/index.ts index 7e40695..e45b700 100644 --- a/libs/questdb-client/src/index.ts +++ b/libs/questdb-client/src/index.ts @@ -1,32 +1,32 @@ -/** - * QuestDB Client Library for Stock Bot - * - * Provides high-performance time-series data access with support for - * InfluxDB Line Protocol, SQL queries, and PostgreSQL wire protocol. 
- */ - -export { QuestDBClient } from './client'; -export { QuestDBHealthMonitor } from './health'; -export { QuestDBQueryBuilder } from './query-builder'; -export { QuestDBInfluxWriter } from './influx-writer'; -export { QuestDBSchemaManager } from './schema'; - -// Types -export type { - QuestDBClientConfig, - QuestDBConnectionOptions, - QuestDBHealthStatus, - QuestDBMetrics, - TableNames, - OHLCVData, - TradeData, - QuoteData, - IndicatorData, - PerformanceData, - RiskMetrics, - QueryResult, - InsertResult -} from './types'; - -// Utils -export { createQuestDBClient, getQuestDBClient } from './factory'; +/** + * QuestDB Client Library for Stock Bot + * + * Provides high-performance time-series data access with support for + * InfluxDB Line Protocol, SQL queries, and PostgreSQL wire protocol. + */ + +export { QuestDBClient } from './client'; +export { QuestDBHealthMonitor } from './health'; +export { QuestDBQueryBuilder } from './query-builder'; +export { QuestDBInfluxWriter } from './influx-writer'; +export { QuestDBSchemaManager } from './schema'; + +// Types +export type { + QuestDBClientConfig, + QuestDBConnectionOptions, + QuestDBHealthStatus, + QuestDBMetrics, + TableNames, + OHLCVData, + TradeData, + QuoteData, + IndicatorData, + PerformanceData, + RiskMetrics, + QueryResult, + InsertResult +} from './types'; + +// Utils +export { createQuestDBClient, getQuestDBClient } from './factory'; diff --git a/libs/questdb-client/src/influx-writer.ts b/libs/questdb-client/src/influx-writer.ts index 43692bf..a788f7b 100644 --- a/libs/questdb-client/src/influx-writer.ts +++ b/libs/questdb-client/src/influx-writer.ts @@ -1,436 +1,436 @@ -import { getLogger } from '@stock-bot/logger'; -import type { - InfluxLineData, - InfluxWriteOptions, - BaseTimeSeriesData -} from './types'; - -// Interface to avoid circular dependency -interface QuestDBClientInterface { - getHttpUrl(): string; -} - -/** - * QuestDB InfluxDB Line Protocol Writer - * - * Provides high-performance data 
ingestion using InfluxDB Line Protocol - * which QuestDB supports natively for optimal time-series data insertion. - */ -export class QuestDBInfluxWriter { - private readonly logger: ReturnType; - private writeBuffer: string[] = []; - private flushTimer: NodeJS.Timeout | null = null; - private readonly defaultOptions: Required = { - batchSize: 1000, - flushInterval: 5000, - autoFlush: true, - precision: 'ms', - retryAttempts: 3, - retryDelay: 1000 - }; - constructor(private readonly client: QuestDBClientInterface) { - this.logger = getLogger('questdb-influx-writer'); - } - - /** - * Write single data point using InfluxDB Line Protocol - */ - public async writePoint( - measurement: string, - tags: Record, - fields: Record, - timestamp?: Date, - options?: Partial - ): Promise { - const line = this.buildLineProtocol(measurement, tags, fields, timestamp); - const opts = { ...this.defaultOptions, ...options }; - - if (opts.autoFlush && this.writeBuffer.length === 0) { - // Single point write - send immediately - await this.sendLines([line], opts); - } else { - // Add to buffer - this.writeBuffer.push(line); - - if (opts.autoFlush) { - this.scheduleFlush(opts); - } - - // Flush if buffer is full - if (this.writeBuffer.length >= opts.batchSize) { - await this.flush(opts); - } - } - } - - /** - * Write multiple data points - */ - public async writePoints( - data: InfluxLineData[], - options?: Partial - ): Promise { - const opts = { ...this.defaultOptions, ...options }; - const lines = data.map(point => - this.buildLineProtocol(point.measurement, point.tags, point.fields, point.timestamp) - ); - - if (opts.autoFlush) { - // Send immediately for batch writes - await this.sendLines(lines, opts); - } else { - // Add to buffer - this.writeBuffer.push(...lines); - - // Flush if buffer exceeds batch size - while (this.writeBuffer.length >= opts.batchSize) { - const batch = this.writeBuffer.splice(0, opts.batchSize); - await this.sendLines(batch, opts); - } - } - } - - /** - * 
Write OHLCV data optimized for QuestDB - */ - public async writeOHLCV( - symbol: string, - exchange: string, - data: { - timestamp: Date; - open: number; - high: number; - low: number; - close: number; - volume: number; - }[], - options?: Partial - ): Promise { - const influxData: InfluxLineData[] = data.map(candle => ({ - measurement: 'ohlcv_data', - tags: { - symbol, - exchange, - data_source: 'market_feed' - }, - fields: { - open: candle.open, - high: candle.high, - low: candle.low, - close: candle.close, - volume: candle.volume - }, - timestamp: candle.timestamp - })); - - await this.writePoints(influxData, options); - } - - /** - * Write market analytics data - */ - public async writeMarketAnalytics( - symbol: string, - exchange: string, - analytics: { - timestamp: Date; - rsi?: number; - macd?: number; - signal?: number; - histogram?: number; - bollinger_upper?: number; - bollinger_lower?: number; - volume_sma?: number; - }, - options?: Partial - ): Promise { - const fields: Record = {}; - - // Only include defined values - Object.entries(analytics).forEach(([key, value]) => { - if (key !== 'timestamp' && value !== undefined && value !== null) { - fields[key] = value as number; - } - }); - - if (Object.keys(fields).length === 0) { - this.logger.warn('No analytics fields to write', { symbol, timestamp: analytics.timestamp }); - return; - } - - await this.writePoint( - 'market_analytics', - { symbol, exchange }, - fields, - analytics.timestamp, - options - ); - } - - /** - * Write trade execution data - */ - public async writeTradeExecution( - execution: { - symbol: string; - side: 'buy' | 'sell'; - quantity: number; - price: number; - timestamp: Date; - executionTime: number; - orderId?: string; - strategy?: string; - }, - options?: Partial - ): Promise { - const tags: Record = { - symbol: execution.symbol, - side: execution.side - }; - - if (execution.orderId) { - tags.order_id = execution.orderId; - } - - if (execution.strategy) { - tags.strategy = 
execution.strategy; - } - - await this.writePoint( - 'trade_executions', - tags, - { - quantity: execution.quantity, - price: execution.price, - execution_time: execution.executionTime - }, - execution.timestamp, - options - ); - } - - /** - * Write performance metrics - */ - public async writePerformanceMetrics( - metrics: { - timestamp: Date; - operation: string; - responseTime: number; - success: boolean; - errorCode?: string; - }, - options?: Partial - ): Promise { - const tags: Record = { - operation: metrics.operation, - success: metrics.success.toString() - }; - - if (metrics.errorCode) { - tags.error_code = metrics.errorCode; - } - - await this.writePoint( - 'performance_metrics', - tags, - { response_time: metrics.responseTime }, - metrics.timestamp, - options - ); - } - - /** - * Manually flush the write buffer - */ - public async flush(options?: Partial): Promise { - if (this.writeBuffer.length === 0) { - return; - } - - const opts = { ...this.defaultOptions, ...options }; - const lines = this.writeBuffer.splice(0); // Clear buffer - - if (this.flushTimer) { - clearTimeout(this.flushTimer); - this.flushTimer = null; - } - - await this.sendLines(lines, opts); - } - - /** - * Get current buffer size - */ - public getBufferSize(): number { - return this.writeBuffer.length; - } - - /** - * Clear the buffer without writing - */ - public clearBuffer(): void { - this.writeBuffer.length = 0; - if (this.flushTimer) { - clearTimeout(this.flushTimer); - this.flushTimer = null; - } - } - - /** - * Build InfluxDB Line Protocol string - */ - private buildLineProtocol( - measurement: string, - tags: Record, - fields: Record, - timestamp?: Date - ): string { - // Escape special characters in measurement name - const escapedMeasurement = measurement.replace(/[, =]/g, '\\$&'); - - // Build tags string - const tagString = Object.entries(tags) - .filter(([_, value]) => value !== undefined && value !== null) - .map(([key, value]) => 
`${this.escapeTagKey(key)}=${this.escapeTagValue(value)}`) - .join(','); - - // Build fields string - const fieldString = Object.entries(fields) - .filter(([_, value]) => value !== undefined && value !== null) - .map(([key, value]) => `${this.escapeFieldKey(key)}=${this.formatFieldValue(value)}`) - .join(','); - - // Build timestamp - const timestampString = timestamp ? - Math.floor(timestamp.getTime() * 1000000).toString() : // Convert to nanoseconds - ''; - - // Combine parts - let line = escapedMeasurement; - if (tagString) { - line += `,${tagString}`; - } - line += ` ${fieldString}`; - if (timestampString) { - line += ` ${timestampString}`; - } - - return line; - } - - /** - * Send lines to QuestDB via HTTP endpoint - */ - private async sendLines( - lines: string[], - options: Required - ): Promise { - if (lines.length === 0) { - return; - } - - const payload = lines.join('\n'); - let attempt = 0; - - while (attempt <= options.retryAttempts) { - try { - // QuestDB InfluxDB Line Protocol endpoint - const response = await fetch(`${this.client.getHttpUrl()}/write`, { - method: 'POST', - headers: { - 'Content-Type': 'text/plain', - }, - body: payload - }); - - if (!response.ok) { - throw new Error(`HTTP ${response.status}: ${response.statusText}`); - } - - this.logger.debug(`Successfully wrote ${lines.length} lines to QuestDB`); - return; - - } catch (error) { - attempt++; - this.logger.error(`Write attempt ${attempt} failed`, { - error, - linesCount: lines.length, - willRetry: attempt <= options.retryAttempts - }); - - if (attempt <= options.retryAttempts) { - await this.sleep(options.retryDelay * attempt); // Exponential backoff - } else { - throw new Error(`Failed to write to QuestDB after ${options.retryAttempts} attempts: $error`); - } - } - } - } - - /** - * Schedule automatic flush - */ - private scheduleFlush(options: Required): void { - if (this.flushTimer || !options.autoFlush) { - return; - } - - this.flushTimer = setTimeout(async () => { - try { - await 
this.flush(options); - } catch (error) { - this.logger.error('Scheduled flush failed', error); - } - }, options.flushInterval); - } - - /** - * Format field value for InfluxDB Line Protocol - */ - private formatFieldValue(value: number | string | boolean): string { - if (typeof value === 'string') { - return `"${value.replace(/"/g, '\\"')}"`; - } else if (typeof value === 'boolean') { - return value ? 'true' : 'false'; - } else { - return value.toString(); - } - } - - /** - * Escape tag key - */ - private escapeTagKey(key: string): string { - return key.replace(/[, =]/g, '\\$&'); - } - - /** - * Escape tag value - */ - private escapeTagValue(value: string): string { - return value.replace(/[, =]/g, '\\$&'); - } - - /** - * Escape field key - */ - private escapeFieldKey(key: string): string { - return key.replace(/[, =]/g, '\\$&'); - } - - /** - * Sleep utility - */ - private sleep(ms: number): Promise { - return new Promise(resolve => setTimeout(resolve, ms)); - } - - /** - * Cleanup resources - */ - public destroy(): void { - this.clearBuffer(); - this.logger.info('InfluxDB writer destroyed'); - } -} +import { getLogger } from '@stock-bot/logger'; +import type { + InfluxLineData, + InfluxWriteOptions, + BaseTimeSeriesData +} from './types'; + +// Interface to avoid circular dependency +interface QuestDBClientInterface { + getHttpUrl(): string; +} + +/** + * QuestDB InfluxDB Line Protocol Writer + * + * Provides high-performance data ingestion using InfluxDB Line Protocol + * which QuestDB supports natively for optimal time-series data insertion. 
+ */ +export class QuestDBInfluxWriter { + private readonly logger: ReturnType; + private writeBuffer: string[] = []; + private flushTimer: NodeJS.Timeout | null = null; + private readonly defaultOptions: Required = { + batchSize: 1000, + flushInterval: 5000, + autoFlush: true, + precision: 'ms', + retryAttempts: 3, + retryDelay: 1000 + }; + constructor(private readonly client: QuestDBClientInterface) { + this.logger = getLogger('questdb-influx-writer'); + } + + /** + * Write single data point using InfluxDB Line Protocol + */ + public async writePoint( + measurement: string, + tags: Record, + fields: Record, + timestamp?: Date, + options?: Partial + ): Promise { + const line = this.buildLineProtocol(measurement, tags, fields, timestamp); + const opts = { ...this.defaultOptions, ...options }; + + if (opts.autoFlush && this.writeBuffer.length === 0) { + // Single point write - send immediately + await this.sendLines([line], opts); + } else { + // Add to buffer + this.writeBuffer.push(line); + + if (opts.autoFlush) { + this.scheduleFlush(opts); + } + + // Flush if buffer is full + if (this.writeBuffer.length >= opts.batchSize) { + await this.flush(opts); + } + } + } + + /** + * Write multiple data points + */ + public async writePoints( + data: InfluxLineData[], + options?: Partial + ): Promise { + const opts = { ...this.defaultOptions, ...options }; + const lines = data.map(point => + this.buildLineProtocol(point.measurement, point.tags, point.fields, point.timestamp) + ); + + if (opts.autoFlush) { + // Send immediately for batch writes + await this.sendLines(lines, opts); + } else { + // Add to buffer + this.writeBuffer.push(...lines); + + // Flush if buffer exceeds batch size + while (this.writeBuffer.length >= opts.batchSize) { + const batch = this.writeBuffer.splice(0, opts.batchSize); + await this.sendLines(batch, opts); + } + } + } + + /** + * Write OHLCV data optimized for QuestDB + */ + public async writeOHLCV( + symbol: string, + exchange: string, + data: { 
+ timestamp: Date; + open: number; + high: number; + low: number; + close: number; + volume: number; + }[], + options?: Partial + ): Promise { + const influxData: InfluxLineData[] = data.map(candle => ({ + measurement: 'ohlcv_data', + tags: { + symbol, + exchange, + data_source: 'market_feed' + }, + fields: { + open: candle.open, + high: candle.high, + low: candle.low, + close: candle.close, + volume: candle.volume + }, + timestamp: candle.timestamp + })); + + await this.writePoints(influxData, options); + } + + /** + * Write market analytics data + */ + public async writeMarketAnalytics( + symbol: string, + exchange: string, + analytics: { + timestamp: Date; + rsi?: number; + macd?: number; + signal?: number; + histogram?: number; + bollinger_upper?: number; + bollinger_lower?: number; + volume_sma?: number; + }, + options?: Partial + ): Promise { + const fields: Record = {}; + + // Only include defined values + Object.entries(analytics).forEach(([key, value]) => { + if (key !== 'timestamp' && value !== undefined && value !== null) { + fields[key] = value as number; + } + }); + + if (Object.keys(fields).length === 0) { + this.logger.warn('No analytics fields to write', { symbol, timestamp: analytics.timestamp }); + return; + } + + await this.writePoint( + 'market_analytics', + { symbol, exchange }, + fields, + analytics.timestamp, + options + ); + } + + /** + * Write trade execution data + */ + public async writeTradeExecution( + execution: { + symbol: string; + side: 'buy' | 'sell'; + quantity: number; + price: number; + timestamp: Date; + executionTime: number; + orderId?: string; + strategy?: string; + }, + options?: Partial + ): Promise { + const tags: Record = { + symbol: execution.symbol, + side: execution.side + }; + + if (execution.orderId) { + tags.order_id = execution.orderId; + } + + if (execution.strategy) { + tags.strategy = execution.strategy; + } + + await this.writePoint( + 'trade_executions', + tags, + { + quantity: execution.quantity, + price: 
execution.price, + execution_time: execution.executionTime + }, + execution.timestamp, + options + ); + } + + /** + * Write performance metrics + */ + public async writePerformanceMetrics( + metrics: { + timestamp: Date; + operation: string; + responseTime: number; + success: boolean; + errorCode?: string; + }, + options?: Partial + ): Promise { + const tags: Record = { + operation: metrics.operation, + success: metrics.success.toString() + }; + + if (metrics.errorCode) { + tags.error_code = metrics.errorCode; + } + + await this.writePoint( + 'performance_metrics', + tags, + { response_time: metrics.responseTime }, + metrics.timestamp, + options + ); + } + + /** + * Manually flush the write buffer + */ + public async flush(options?: Partial): Promise { + if (this.writeBuffer.length === 0) { + return; + } + + const opts = { ...this.defaultOptions, ...options }; + const lines = this.writeBuffer.splice(0); // Clear buffer + + if (this.flushTimer) { + clearTimeout(this.flushTimer); + this.flushTimer = null; + } + + await this.sendLines(lines, opts); + } + + /** + * Get current buffer size + */ + public getBufferSize(): number { + return this.writeBuffer.length; + } + + /** + * Clear the buffer without writing + */ + public clearBuffer(): void { + this.writeBuffer.length = 0; + if (this.flushTimer) { + clearTimeout(this.flushTimer); + this.flushTimer = null; + } + } + + /** + * Build InfluxDB Line Protocol string + */ + private buildLineProtocol( + measurement: string, + tags: Record, + fields: Record, + timestamp?: Date + ): string { + // Escape special characters in measurement name + const escapedMeasurement = measurement.replace(/[, =]/g, '\\$&'); + + // Build tags string + const tagString = Object.entries(tags) + .filter(([_, value]) => value !== undefined && value !== null) + .map(([key, value]) => `${this.escapeTagKey(key)}=${this.escapeTagValue(value)}`) + .join(','); + + // Build fields string + const fieldString = Object.entries(fields) + .filter(([_, value]) 
=> value !== undefined && value !== null) + .map(([key, value]) => `${this.escapeFieldKey(key)}=${this.formatFieldValue(value)}`) + .join(','); + + // Build timestamp + const timestampString = timestamp ? + Math.floor(timestamp.getTime() * 1000000).toString() : // Convert to nanoseconds + ''; + + // Combine parts + let line = escapedMeasurement; + if (tagString) { + line += `,${tagString}`; + } + line += ` ${fieldString}`; + if (timestampString) { + line += ` ${timestampString}`; + } + + return line; + } + + /** + * Send lines to QuestDB via HTTP endpoint + */ + private async sendLines( + lines: string[], + options: Required + ): Promise { + if (lines.length === 0) { + return; + } + + const payload = lines.join('\n'); + let attempt = 0; + + while (attempt <= options.retryAttempts) { + try { + // QuestDB InfluxDB Line Protocol endpoint + const response = await fetch(`${this.client.getHttpUrl()}/write`, { + method: 'POST', + headers: { + 'Content-Type': 'text/plain', + }, + body: payload + }); + + if (!response.ok) { + throw new Error(`HTTP ${response.status}: ${response.statusText}`); + } + + this.logger.debug(`Successfully wrote ${lines.length} lines to QuestDB`); + return; + + } catch (error) { + attempt++; + this.logger.error(`Write attempt ${attempt} failed`, { + error, + linesCount: lines.length, + willRetry: attempt <= options.retryAttempts + }); + + if (attempt <= options.retryAttempts) { + await this.sleep(options.retryDelay * attempt); // Exponential backoff + } else { + throw new Error(`Failed to write to QuestDB after ${options.retryAttempts} attempts: $error`); + } + } + } + } + + /** + * Schedule automatic flush + */ + private scheduleFlush(options: Required): void { + if (this.flushTimer || !options.autoFlush) { + return; + } + + this.flushTimer = setTimeout(async () => { + try { + await this.flush(options); + } catch (error) { + this.logger.error('Scheduled flush failed', error); + } + }, options.flushInterval); + } + + /** + * Format field value for 
InfluxDB Line Protocol + */ + private formatFieldValue(value: number | string | boolean): string { + if (typeof value === 'string') { + return `"${value.replace(/"/g, '\\"')}"`; + } else if (typeof value === 'boolean') { + return value ? 'true' : 'false'; + } else { + return value.toString(); + } + } + + /** + * Escape tag key + */ + private escapeTagKey(key: string): string { + return key.replace(/[, =]/g, '\\$&'); + } + + /** + * Escape tag value + */ + private escapeTagValue(value: string): string { + return value.replace(/[, =]/g, '\\$&'); + } + + /** + * Escape field key + */ + private escapeFieldKey(key: string): string { + return key.replace(/[, =]/g, '\\$&'); + } + + /** + * Sleep utility + */ + private sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)); + } + + /** + * Cleanup resources + */ + public destroy(): void { + this.clearBuffer(); + this.logger.info('InfluxDB writer destroyed'); + } +} diff --git a/libs/questdb-client/src/query-builder.ts b/libs/questdb-client/src/query-builder.ts index 938f27a..db950df 100644 --- a/libs/questdb-client/src/query-builder.ts +++ b/libs/questdb-client/src/query-builder.ts @@ -1,368 +1,368 @@ -import { getLogger } from '@stock-bot/logger'; -import type { - QueryResult, - TimeSeriesQuery, - AggregationQuery, - TimeRange, - TableNames -} from './types'; - -// Interface to avoid circular dependency -interface QuestDBClientInterface { - query(sql: string, params?: any[]): Promise>; -} - -/** - * QuestDB Query Builder - * - * Provides a fluent interface for building optimized time-series queries - * with support for QuestDB-specific functions and optimizations. 
- */ -export class QuestDBQueryBuilder { - private readonly logger: ReturnType; - private query!: { - select: string[]; - from: string; - where: string[]; - groupBy: string[]; - orderBy: string[]; - limit?: number; - sampleBy?: string; - latestBy?: string[]; - timeRange?: TimeRange; - }; - constructor(private readonly client: QuestDBClientInterface) { - this.logger = getLogger('questdb-query-builder'); - this.reset(); - } - - /** - * Reset the query builder - */ - private reset(): QuestDBQueryBuilder { - this.query = { - select: [], - from: '', - where: [], - groupBy: [], - orderBy: [], - sampleBy: undefined, - latestBy: undefined, - timeRange: undefined - }; - return this; - } - /** - * Start a new query - */ - public static create(client: QuestDBClientInterface): QuestDBQueryBuilder { - return new QuestDBQueryBuilder(client); - } - - /** - * Select columns - */ - public select(...columns: string[]): QuestDBQueryBuilder { - this.query.select.push(...columns); - return this; - } - - /** - * Select with aggregation functions - */ - public selectAgg(aggregations: Record): QuestDBQueryBuilder { - Object.entries(aggregations).forEach(([alias, expression]) => { - this.query.select.push(`${expression} as ${alias}`); - }); - return this; - } - - /** - * From table - */ - public from(table: TableNames | string): QuestDBQueryBuilder { - this.query.from = table; - return this; - } - - /** - * Where condition - */ - public where(condition: string): QuestDBQueryBuilder { - this.query.where.push(condition); - return this; - } - - /** - * Where symbol equals - */ - public whereSymbol(symbol: string): QuestDBQueryBuilder { - this.query.where.push(`symbol = '${symbol}'`); - return this; - } - - /** - * Where symbols in list - */ - public whereSymbolIn(symbols: string[]): QuestDBQueryBuilder { - const symbolList = symbols.map(s => `'${s}'`).join(', '); - this.query.where.push(`symbol IN (${symbolList})`); - return this; - } - - /** - * Where exchange equals - */ - public 
whereExchange(exchange: string): QuestDBQueryBuilder { - this.query.where.push(`exchange = '${exchange}'`); - return this; - } - - /** - * Time range filter - */ - public whereTimeRange(startTime: Date, endTime: Date): QuestDBQueryBuilder { - this.query.timeRange = { startTime, endTime }; - this.query.where.push( - `timestamp >= '${startTime.toISOString()}' AND timestamp <= '${endTime.toISOString()}'` - ); - return this; - } - - /** - * Last N hours - */ - public whereLastHours(hours: number): QuestDBQueryBuilder { - this.query.where.push(`timestamp > dateadd('h', -${hours}, now())`); - return this; - } - - /** - * Last N days - */ - public whereLastDays(days: number): QuestDBQueryBuilder { - this.query.where.push(`timestamp > dateadd('d', -${days}, now())`); - return this; - } - - /** - * Group by columns - */ - public groupBy(...columns: string[]): QuestDBQueryBuilder { - this.query.groupBy.push(...columns); - return this; - } - - /** - * Order by column - */ - public orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): QuestDBQueryBuilder { - this.query.orderBy.push(`${column} ${direction}`); - return this; - } - - /** - * Order by timestamp descending (most recent first) - */ - public orderByTimeDesc(): QuestDBQueryBuilder { - this.query.orderBy.push('timestamp DESC'); - return this; - } - - /** - * Limit results - */ - public limit(count: number): QuestDBQueryBuilder { - this.query.limit = count; - return this; - } - - /** - * Sample by time interval (QuestDB specific) - */ - public sampleBy(interval: string): QuestDBQueryBuilder { - this.query.sampleBy = interval; - return this; - } - - /** - * Latest by columns (QuestDB specific) - */ - public latestBy(...columns: string[]): QuestDBQueryBuilder { - this.query.latestBy = columns; - return this; - } - - /** - * Build and execute the query - */ - public async execute(): Promise> { - const sql = this.build(); - this.logger.debug('Executing query', { sql }); - - try { - const result = await 
this.client.query(sql); - this.reset(); // Reset for next query - return result; - } catch (error) { - this.logger.error('Query execution failed', { sql, error }); - this.reset(); // Reset even on error - throw error; - } - } - - /** - * Build the SQL query string - */ - public build(): string { - if (!this.query.from) { - throw new Error('FROM clause is required'); - } - - if (this.query.select.length === 0) { - this.query.select.push('*'); - } - - let sql = `SELECT ${this.query.select.join(', ')} FROM ${this.query.from}`; - - // Add WHERE clause - if (this.query.where.length > 0) { - sql += ` WHERE ${this.query.where.join(' AND ')}`; - } - - // Add LATEST BY (QuestDB specific - must come before GROUP BY) - if (this.query.latestBy && this.query.latestBy.length > 0) { - sql += ` LATEST BY ${this.query.latestBy.join(', ')}`; - } - - // Add SAMPLE BY (QuestDB specific) - if (this.query.sampleBy) { - sql += ` SAMPLE BY ${this.query.sampleBy}`; - } - - // Add GROUP BY - if (this.query.groupBy.length > 0) { - sql += ` GROUP BY ${this.query.groupBy.join(', ')}`; - } - - // Add ORDER BY - if (this.query.orderBy.length > 0) { - sql += ` ORDER BY ${this.query.orderBy.join(', ')}`; - } - - // Add LIMIT - if (this.query.limit) { - sql += ` LIMIT ${this.query.limit}`; - } - - return sql; - } - - /** - * Get the built query without executing - */ - public toSQL(): string { - return this.build(); - } - - // Predefined query methods for common use cases - /** - * Get latest OHLCV data for symbols - */ - public static latestOHLCV( - client: QuestDBClientInterface, - symbols: string[], - exchange?: string - ): QuestDBQueryBuilder { - const builder = QuestDBQueryBuilder.create(client) - .select('symbol', 'timestamp', 'open', 'high', 'low', 'close', 'volume') - .from('ohlcv_data') - .whereSymbolIn(symbols) - .latestBy('symbol') - .orderByTimeDesc(); - - if (exchange) { - builder.whereExchange(exchange); - } - - return builder; - } - /** - * Get OHLCV data with time sampling - */ - 
public static ohlcvTimeSeries( - client: QuestDBClientInterface, - symbol: string, - interval: string, - hours: number = 24 - ): QuestDBQueryBuilder { - return QuestDBQueryBuilder.create(client) - .selectAgg({ - 'first_open': 'first(open)', - 'max_high': 'max(high)', - 'min_low': 'min(low)', - 'last_close': 'last(close)', - 'sum_volume': 'sum(volume)' - }) - .from('ohlcv_data') - .whereSymbol(symbol) - .whereLastHours(hours) - .sampleBy(interval) - .orderByTimeDesc(); - } - /** - * Get market analytics data - */ - public static marketAnalytics( - client: QuestDBClientInterface, - symbols: string[], - hours: number = 1 - ): QuestDBQueryBuilder { - return QuestDBQueryBuilder.create(client) - .select('symbol', 'timestamp', 'rsi', 'macd', 'bollinger_upper', 'bollinger_lower', 'volume_sma') - .from('market_analytics') - .whereSymbolIn(symbols) - .whereLastHours(hours) - .orderBy('symbol') - .orderByTimeDesc(); - } - /** - * Get performance metrics for a time range - */ - public static performanceMetrics( - client: QuestDBClientInterface, - startTime: Date, - endTime: Date - ): QuestDBQueryBuilder { - return QuestDBQueryBuilder.create(client) - .selectAgg({ - 'total_trades': 'count(*)', - 'avg_response_time': 'avg(response_time)', - 'max_response_time': 'max(response_time)', - 'error_rate': 'sum(case when success = false then 1 else 0 end) * 100.0 / count(*)' - }) - .from('performance_metrics') - .whereTimeRange(startTime, endTime) - .sampleBy('1m'); - } - /** - * Get trade execution data - */ - public static tradeExecutions( - client: QuestDBClientInterface, - symbol?: string, - hours: number = 24 - ): QuestDBQueryBuilder { - const builder = QuestDBQueryBuilder.create(client) - .select('symbol', 'timestamp', 'side', 'quantity', 'price', 'execution_time') - .from('trade_executions') - .whereLastHours(hours) - .orderByTimeDesc(); - - if (symbol) { - builder.whereSymbol(symbol); - } - - return builder; - } -} +import { getLogger } from '@stock-bot/logger'; +import type { + 
QueryResult, + TimeSeriesQuery, + AggregationQuery, + TimeRange, + TableNames +} from './types'; + +// Interface to avoid circular dependency +interface QuestDBClientInterface { + query(sql: string, params?: any[]): Promise>; +} + +/** + * QuestDB Query Builder + * + * Provides a fluent interface for building optimized time-series queries + * with support for QuestDB-specific functions and optimizations. + */ +export class QuestDBQueryBuilder { + private readonly logger: ReturnType; + private query!: { + select: string[]; + from: string; + where: string[]; + groupBy: string[]; + orderBy: string[]; + limit?: number; + sampleBy?: string; + latestBy?: string[]; + timeRange?: TimeRange; + }; + constructor(private readonly client: QuestDBClientInterface) { + this.logger = getLogger('questdb-query-builder'); + this.reset(); + } + + /** + * Reset the query builder + */ + private reset(): QuestDBQueryBuilder { + this.query = { + select: [], + from: '', + where: [], + groupBy: [], + orderBy: [], + sampleBy: undefined, + latestBy: undefined, + timeRange: undefined + }; + return this; + } + /** + * Start a new query + */ + public static create(client: QuestDBClientInterface): QuestDBQueryBuilder { + return new QuestDBQueryBuilder(client); + } + + /** + * Select columns + */ + public select(...columns: string[]): QuestDBQueryBuilder { + this.query.select.push(...columns); + return this; + } + + /** + * Select with aggregation functions + */ + public selectAgg(aggregations: Record): QuestDBQueryBuilder { + Object.entries(aggregations).forEach(([alias, expression]) => { + this.query.select.push(`${expression} as ${alias}`); + }); + return this; + } + + /** + * From table + */ + public from(table: TableNames | string): QuestDBQueryBuilder { + this.query.from = table; + return this; + } + + /** + * Where condition + */ + public where(condition: string): QuestDBQueryBuilder { + this.query.where.push(condition); + return this; + } + + /** + * Where symbol equals + */ + public 
whereSymbol(symbol: string): QuestDBQueryBuilder { + this.query.where.push(`symbol = '${symbol}'`); + return this; + } + + /** + * Where symbols in list + */ + public whereSymbolIn(symbols: string[]): QuestDBQueryBuilder { + const symbolList = symbols.map(s => `'${s}'`).join(', '); + this.query.where.push(`symbol IN (${symbolList})`); + return this; + } + + /** + * Where exchange equals + */ + public whereExchange(exchange: string): QuestDBQueryBuilder { + this.query.where.push(`exchange = '${exchange}'`); + return this; + } + + /** + * Time range filter + */ + public whereTimeRange(startTime: Date, endTime: Date): QuestDBQueryBuilder { + this.query.timeRange = { startTime, endTime }; + this.query.where.push( + `timestamp >= '${startTime.toISOString()}' AND timestamp <= '${endTime.toISOString()}'` + ); + return this; + } + + /** + * Last N hours + */ + public whereLastHours(hours: number): QuestDBQueryBuilder { + this.query.where.push(`timestamp > dateadd('h', -${hours}, now())`); + return this; + } + + /** + * Last N days + */ + public whereLastDays(days: number): QuestDBQueryBuilder { + this.query.where.push(`timestamp > dateadd('d', -${days}, now())`); + return this; + } + + /** + * Group by columns + */ + public groupBy(...columns: string[]): QuestDBQueryBuilder { + this.query.groupBy.push(...columns); + return this; + } + + /** + * Order by column + */ + public orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): QuestDBQueryBuilder { + this.query.orderBy.push(`${column} ${direction}`); + return this; + } + + /** + * Order by timestamp descending (most recent first) + */ + public orderByTimeDesc(): QuestDBQueryBuilder { + this.query.orderBy.push('timestamp DESC'); + return this; + } + + /** + * Limit results + */ + public limit(count: number): QuestDBQueryBuilder { + this.query.limit = count; + return this; + } + + /** + * Sample by time interval (QuestDB specific) + */ + public sampleBy(interval: string): QuestDBQueryBuilder { + this.query.sampleBy = 
interval; + return this; + } + + /** + * Latest by columns (QuestDB specific) + */ + public latestBy(...columns: string[]): QuestDBQueryBuilder { + this.query.latestBy = columns; + return this; + } + + /** + * Build and execute the query + */ + public async execute(): Promise> { + const sql = this.build(); + this.logger.debug('Executing query', { sql }); + + try { + const result = await this.client.query(sql); + this.reset(); // Reset for next query + return result; + } catch (error) { + this.logger.error('Query execution failed', { sql, error }); + this.reset(); // Reset even on error + throw error; + } + } + + /** + * Build the SQL query string + */ + public build(): string { + if (!this.query.from) { + throw new Error('FROM clause is required'); + } + + if (this.query.select.length === 0) { + this.query.select.push('*'); + } + + let sql = `SELECT ${this.query.select.join(', ')} FROM ${this.query.from}`; + + // Add WHERE clause + if (this.query.where.length > 0) { + sql += ` WHERE ${this.query.where.join(' AND ')}`; + } + + // Add LATEST BY (QuestDB specific - must come before GROUP BY) + if (this.query.latestBy && this.query.latestBy.length > 0) { + sql += ` LATEST BY ${this.query.latestBy.join(', ')}`; + } + + // Add SAMPLE BY (QuestDB specific) + if (this.query.sampleBy) { + sql += ` SAMPLE BY ${this.query.sampleBy}`; + } + + // Add GROUP BY + if (this.query.groupBy.length > 0) { + sql += ` GROUP BY ${this.query.groupBy.join(', ')}`; + } + + // Add ORDER BY + if (this.query.orderBy.length > 0) { + sql += ` ORDER BY ${this.query.orderBy.join(', ')}`; + } + + // Add LIMIT + if (this.query.limit) { + sql += ` LIMIT ${this.query.limit}`; + } + + return sql; + } + + /** + * Get the built query without executing + */ + public toSQL(): string { + return this.build(); + } + + // Predefined query methods for common use cases + /** + * Get latest OHLCV data for symbols + */ + public static latestOHLCV( + client: QuestDBClientInterface, + symbols: string[], + exchange?: 
string + ): QuestDBQueryBuilder { + const builder = QuestDBQueryBuilder.create(client) + .select('symbol', 'timestamp', 'open', 'high', 'low', 'close', 'volume') + .from('ohlcv_data') + .whereSymbolIn(symbols) + .latestBy('symbol') + .orderByTimeDesc(); + + if (exchange) { + builder.whereExchange(exchange); + } + + return builder; + } + /** + * Get OHLCV data with time sampling + */ + public static ohlcvTimeSeries( + client: QuestDBClientInterface, + symbol: string, + interval: string, + hours: number = 24 + ): QuestDBQueryBuilder { + return QuestDBQueryBuilder.create(client) + .selectAgg({ + 'first_open': 'first(open)', + 'max_high': 'max(high)', + 'min_low': 'min(low)', + 'last_close': 'last(close)', + 'sum_volume': 'sum(volume)' + }) + .from('ohlcv_data') + .whereSymbol(symbol) + .whereLastHours(hours) + .sampleBy(interval) + .orderByTimeDesc(); + } + /** + * Get market analytics data + */ + public static marketAnalytics( + client: QuestDBClientInterface, + symbols: string[], + hours: number = 1 + ): QuestDBQueryBuilder { + return QuestDBQueryBuilder.create(client) + .select('symbol', 'timestamp', 'rsi', 'macd', 'bollinger_upper', 'bollinger_lower', 'volume_sma') + .from('market_analytics') + .whereSymbolIn(symbols) + .whereLastHours(hours) + .orderBy('symbol') + .orderByTimeDesc(); + } + /** + * Get performance metrics for a time range + */ + public static performanceMetrics( + client: QuestDBClientInterface, + startTime: Date, + endTime: Date + ): QuestDBQueryBuilder { + return QuestDBQueryBuilder.create(client) + .selectAgg({ + 'total_trades': 'count(*)', + 'avg_response_time': 'avg(response_time)', + 'max_response_time': 'max(response_time)', + 'error_rate': 'sum(case when success = false then 1 else 0 end) * 100.0 / count(*)' + }) + .from('performance_metrics') + .whereTimeRange(startTime, endTime) + .sampleBy('1m'); + } + /** + * Get trade execution data + */ + public static tradeExecutions( + client: QuestDBClientInterface, + symbol?: string, + hours: 
number = 24 + ): QuestDBQueryBuilder { + const builder = QuestDBQueryBuilder.create(client) + .select('symbol', 'timestamp', 'side', 'quantity', 'price', 'execution_time') + .from('trade_executions') + .whereLastHours(hours) + .orderByTimeDesc(); + + if (symbol) { + builder.whereSymbol(symbol); + } + + return builder; + } +} diff --git a/libs/questdb-client/src/schema.ts b/libs/questdb-client/src/schema.ts index 97f1fea..f0dac0d 100644 --- a/libs/questdb-client/src/schema.ts +++ b/libs/questdb-client/src/schema.ts @@ -1,404 +1,404 @@ -import { getLogger } from '@stock-bot/logger'; -import type { TableSchema, IndexDefinition, TableNames, QueryResult } from './types'; - -// Interface to avoid circular dependency -interface QuestDBClientInterface { - query(sql: string, params?: any[]): Promise>; -} - -/** - * QuestDB Schema Manager - * - * Manages database schemas, table creation, and optimization - * for time-series data storage in QuestDB. - */ -export class QuestDBSchemaManager { - private readonly logger: ReturnType; - private readonly schemas: Map = new Map(); - constructor(private readonly client: QuestDBClientInterface) { - this.logger = getLogger('questdb-schema-manager'); - this.initializeSchemas(); - } - - /** - * Initialize predefined schemas - */ - private initializeSchemas(): void { - // OHLCV Data Table - this.schemas.set('ohlcv_data', { - tableName: 'ohlcv_data', - columns: [ - { name: 'symbol', type: 'SYMBOL', nullable: false }, - { name: 'exchange', type: 'SYMBOL', nullable: false }, - { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, - { name: 'open', type: 'DOUBLE', nullable: false }, - { name: 'high', type: 'DOUBLE', nullable: false }, - { name: 'low', type: 'DOUBLE', nullable: false }, - { name: 'close', type: 'DOUBLE', nullable: false }, - { name: 'volume', type: 'LONG', nullable: false }, - { name: 'data_source', type: 'SYMBOL', nullable: true } - ], - partitionBy: 'DAY', - orderBy: ['symbol', 'timestamp'], - indices: 
[ - { columns: ['symbol'], type: 'HASH' }, - { columns: ['exchange'], type: 'HASH' } - ] - }); - - // Market Analytics Table - this.schemas.set('market_analytics', { - tableName: 'market_analytics', - columns: [ - { name: 'symbol', type: 'SYMBOL', nullable: false }, - { name: 'exchange', type: 'SYMBOL', nullable: false }, - { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, - { name: 'rsi', type: 'DOUBLE', nullable: true }, - { name: 'macd', type: 'DOUBLE', nullable: true }, - { name: 'signal', type: 'DOUBLE', nullable: true }, - { name: 'histogram', type: 'DOUBLE', nullable: true }, - { name: 'bollinger_upper', type: 'DOUBLE', nullable: true }, - { name: 'bollinger_lower', type: 'DOUBLE', nullable: true }, - { name: 'volume_sma', type: 'DOUBLE', nullable: true }, - { name: 'timeframe', type: 'SYMBOL', nullable: true } - ], - partitionBy: 'DAY', - orderBy: ['symbol', 'timestamp'], - indices: [ - { columns: ['symbol'], type: 'HASH' }, - { columns: ['timeframe'], type: 'HASH' } - ] - }); - - // Trade Executions Table - this.schemas.set('trade_executions', { - tableName: 'trade_executions', - columns: [ - { name: 'symbol', type: 'SYMBOL', nullable: false }, - { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, - { name: 'side', type: 'SYMBOL', nullable: false }, - { name: 'quantity', type: 'DOUBLE', nullable: false }, - { name: 'price', type: 'DOUBLE', nullable: false }, - { name: 'execution_time', type: 'LONG', nullable: false }, - { name: 'order_id', type: 'SYMBOL', nullable: true }, - { name: 'strategy', type: 'SYMBOL', nullable: true }, - { name: 'commission', type: 'DOUBLE', nullable: true } - ], - partitionBy: 'DAY', - orderBy: ['symbol', 'timestamp'], - indices: [ - { columns: ['symbol'], type: 'HASH' }, - { columns: ['order_id'], type: 'HASH' }, - { columns: ['strategy'], type: 'HASH' } - ] - }); - - // Performance Metrics Table - this.schemas.set('performance_metrics', { - tableName: 'performance_metrics', - 
columns: [ - { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, - { name: 'operation', type: 'SYMBOL', nullable: false }, - { name: 'response_time', type: 'LONG', nullable: false }, - { name: 'success', type: 'BOOLEAN', nullable: false }, - { name: 'error_code', type: 'SYMBOL', nullable: true }, - { name: 'component', type: 'SYMBOL', nullable: true } - ], - partitionBy: 'HOUR', - orderBy: ['operation', 'timestamp'], - indices: [ - { columns: ['operation'], type: 'HASH' }, - { columns: ['success'], type: 'HASH' } - ] - }); - - // Portfolio Positions Table - this.schemas.set('portfolio_positions', { - tableName: 'portfolio_positions', - columns: [ - { name: 'portfolio_id', type: 'SYMBOL', nullable: false }, - { name: 'symbol', type: 'SYMBOL', nullable: false }, - { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, - { name: 'quantity', type: 'DOUBLE', nullable: false }, - { name: 'avg_cost', type: 'DOUBLE', nullable: false }, - { name: 'market_value', type: 'DOUBLE', nullable: false }, - { name: 'unrealized_pnl', type: 'DOUBLE', nullable: false }, - { name: 'realized_pnl', type: 'DOUBLE', nullable: false } - ], - partitionBy: 'DAY', - orderBy: ['portfolio_id', 'symbol', 'timestamp'], - indices: [ - { columns: ['portfolio_id'], type: 'HASH' }, - { columns: ['symbol'], type: 'HASH' } - ] - }); - - // Risk Metrics Table - this.schemas.set('risk_metrics', { - tableName: 'risk_metrics', - columns: [ - { name: 'portfolio_id', type: 'SYMBOL', nullable: false }, - { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, - { name: 'var_1d', type: 'DOUBLE', nullable: true }, - { name: 'var_5d', type: 'DOUBLE', nullable: true }, - { name: 'expected_shortfall', type: 'DOUBLE', nullable: true }, - { name: 'beta', type: 'DOUBLE', nullable: true }, - { name: 'sharpe_ratio', type: 'DOUBLE', nullable: true }, - { name: 'max_drawdown', type: 'DOUBLE', nullable: true }, - { name: 'volatility', type: 'DOUBLE', 
nullable: true } - ], - partitionBy: 'DAY', - orderBy: ['portfolio_id', 'timestamp'], - indices: [ - { columns: ['portfolio_id'], type: 'HASH' } - ] - }); - } - - /** - * Create all tables - */ - public async createAllTables(): Promise { - this.logger.info('Creating all QuestDB tables'); - - for (const [tableName, schema] of this.schemas) { - try { - await this.createTable(schema); - this.logger.info(`Table ${tableName} created successfully`); - } catch (error) { - this.logger.error(`Failed to create table ${tableName}`, error); - throw error; - } - } - } - - /** - * Create a single table - */ - public async createTable(schema: TableSchema): Promise { - const sql = this.buildCreateTableSQL(schema); - - try { - await this.client.query(sql); - this.logger.info(`Table ${schema.tableName} created`, { sql }); - } catch (error) { - // Check if table already exists - if (error instanceof Error && error.message.includes('already exists')) { - this.logger.info(`Table ${schema.tableName} already exists`); - return; - } - throw error; - } - } - - /** - * Drop a table - */ - public async dropTable(tableName: string): Promise { - const sql = `DROP TABLE IF EXISTS ${tableName}`; - - try { - await this.client.query(sql); - this.logger.info(`Table ${tableName} dropped`); - } catch (error) { - this.logger.error(`Failed to drop table ${tableName}`, error); - throw error; - } - } - - /** - * Check if table exists - */ - public async tableExists(tableName: string): Promise { - try { - const result = await this.client.query(` - SELECT COUNT(*) as count - FROM information_schema.tables - WHERE table_name = '${tableName}' - `); - - return result.rows.length > 0 && result.rows[0].count > 0; - } catch (error) { - this.logger.error(`Error checking if table exists: ${tableName}`, error); - return false; - } - } - - /** - * Get table schema - */ - public getSchema(tableName: string): TableSchema | undefined { - return this.schemas.get(tableName); - } - - /** - * Add custom schema - */ - 
public addSchema(schema: TableSchema): void { - this.schemas.set(schema.tableName, schema); - this.logger.info(`Schema added for table: ${schema.tableName}`); - } - - /** - * Get all schema names - */ - public getSchemaNames(): string[] { - return Array.from(this.schemas.keys()); - } - - /** - * Optimize table (rebuild indices, etc.) - */ - public async optimizeTable(tableName: string): Promise { - const schema = this.schemas.get(tableName); - if (!schema) { - throw new Error(`Schema not found for table: ${tableName}`); - } - - // QuestDB automatically optimizes, but we can analyze table stats - try { - const stats = await this.getTableStats(tableName); - this.logger.info(`Table ${tableName} stats`, stats); - } catch (error) { - this.logger.error(`Failed to optimize table ${tableName}`, error); - throw error; - } - } - - /** - * Get table statistics - */ - public async getTableStats(tableName: string): Promise { - try { - const result = await this.client.query(` - SELECT - COUNT(*) as row_count, - MIN(timestamp) as min_timestamp, - MAX(timestamp) as max_timestamp - FROM ${tableName} - `); - - return result.rows[0] || {}; - } catch (error) { - this.logger.error(`Failed to get table stats for ${tableName}`, error); - throw error; - } - } - - /** - * Truncate table (remove all data but keep structure) - */ - public async truncateTable(tableName: string): Promise { - try { - await this.client.query(`TRUNCATE TABLE ${tableName}`); - this.logger.info(`Table ${tableName} truncated`); - } catch (error) { - this.logger.error(`Failed to truncate table ${tableName}`, error); - throw error; - } - } - - /** - * Create table partitions for future dates - */ - public async createPartitions(tableName: string, days: number = 30): Promise { - // QuestDB handles partitioning automatically based on the PARTITION BY clause - // This method is for future extensibility - this.logger.info(`Partitioning is automatic for table ${tableName}`); - } - - /** - * Build CREATE TABLE SQL statement 
- */ - private buildCreateTableSQL(schema: TableSchema): string { - const columns = schema.columns.map(col => { - let columnDef = `${col.name} ${col.type}`; - - if (!col.nullable) { - columnDef += ' NOT NULL'; - } - - return columnDef; - }).join(', '); - - let sql = `CREATE TABLE IF NOT EXISTS ${schema.tableName} (${columns})`; - - // Add designated timestamp - const timestampColumn = schema.columns.find(col => col.designated); - if (timestampColumn) { - sql += ` timestamp(${timestampColumn.name})`; - } - - // Add partition by - if (schema.partitionBy) { - sql += ` PARTITION BY ${schema.partitionBy}`; - } - - return sql; - } - - /** - * Build index creation SQL (for future use) - */ - private buildCreateIndexSQL(tableName: string, index: IndexDefinition): string { - const indexName = `idx_${tableName}_${index.columns.join('_')}`; - const columns = index.columns.join(', '); - - // QuestDB uses different index syntax, this is for future compatibility - return `CREATE INDEX ${indexName} ON ${tableName} (${columns})`; - } - - /** - * Validate schema definition - */ - private validateSchema(schema: TableSchema): void { - if (!schema.tableName) { - throw new Error('Table name is required'); - } - - if (!schema.columns || schema.columns.length === 0) { - throw new Error('At least one column is required'); - } - - const timestampColumns = schema.columns.filter(col => col.designated); - if (timestampColumns.length > 1) { - throw new Error('Only one designated timestamp column is allowed'); - } - - if (timestampColumns.length === 0) { - throw new Error('A designated timestamp column is required for time-series tables'); - } - } - - /** - * Get table creation status - */ - public async getTableCreationStatus(): Promise> { - const status: Record = {}; - - for (const tableName of this.schemas.keys()) { - status[tableName] = await this.tableExists(tableName); - } - - return status; - } - - /** - * Initialize database schema - */ - public async initializeDatabase(): Promise { - 
this.logger.info('Initializing QuestDB schema'); - - // Validate all schemas first - for (const schema of this.schemas.values()) { - this.validateSchema(schema); - } - - // Create all tables - await this.createAllTables(); - - // Get creation status - const status = await this.getTableCreationStatus(); - this.logger.info('Database initialization complete', { tableStatus: status }); - } -} +import { getLogger } from '@stock-bot/logger'; +import type { TableSchema, IndexDefinition, TableNames, QueryResult } from './types'; + +// Interface to avoid circular dependency +interface QuestDBClientInterface { + query(sql: string, params?: any[]): Promise>; +} + +/** + * QuestDB Schema Manager + * + * Manages database schemas, table creation, and optimization + * for time-series data storage in QuestDB. + */ +export class QuestDBSchemaManager { + private readonly logger: ReturnType; + private readonly schemas: Map = new Map(); + constructor(private readonly client: QuestDBClientInterface) { + this.logger = getLogger('questdb-schema-manager'); + this.initializeSchemas(); + } + + /** + * Initialize predefined schemas + */ + private initializeSchemas(): void { + // OHLCV Data Table + this.schemas.set('ohlcv_data', { + tableName: 'ohlcv_data', + columns: [ + { name: 'symbol', type: 'SYMBOL', nullable: false }, + { name: 'exchange', type: 'SYMBOL', nullable: false }, + { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, + { name: 'open', type: 'DOUBLE', nullable: false }, + { name: 'high', type: 'DOUBLE', nullable: false }, + { name: 'low', type: 'DOUBLE', nullable: false }, + { name: 'close', type: 'DOUBLE', nullable: false }, + { name: 'volume', type: 'LONG', nullable: false }, + { name: 'data_source', type: 'SYMBOL', nullable: true } + ], + partitionBy: 'DAY', + orderBy: ['symbol', 'timestamp'], + indices: [ + { columns: ['symbol'], type: 'HASH' }, + { columns: ['exchange'], type: 'HASH' } + ] + }); + + // Market Analytics Table + 
this.schemas.set('market_analytics', { + tableName: 'market_analytics', + columns: [ + { name: 'symbol', type: 'SYMBOL', nullable: false }, + { name: 'exchange', type: 'SYMBOL', nullable: false }, + { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, + { name: 'rsi', type: 'DOUBLE', nullable: true }, + { name: 'macd', type: 'DOUBLE', nullable: true }, + { name: 'signal', type: 'DOUBLE', nullable: true }, + { name: 'histogram', type: 'DOUBLE', nullable: true }, + { name: 'bollinger_upper', type: 'DOUBLE', nullable: true }, + { name: 'bollinger_lower', type: 'DOUBLE', nullable: true }, + { name: 'volume_sma', type: 'DOUBLE', nullable: true }, + { name: 'timeframe', type: 'SYMBOL', nullable: true } + ], + partitionBy: 'DAY', + orderBy: ['symbol', 'timestamp'], + indices: [ + { columns: ['symbol'], type: 'HASH' }, + { columns: ['timeframe'], type: 'HASH' } + ] + }); + + // Trade Executions Table + this.schemas.set('trade_executions', { + tableName: 'trade_executions', + columns: [ + { name: 'symbol', type: 'SYMBOL', nullable: false }, + { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, + { name: 'side', type: 'SYMBOL', nullable: false }, + { name: 'quantity', type: 'DOUBLE', nullable: false }, + { name: 'price', type: 'DOUBLE', nullable: false }, + { name: 'execution_time', type: 'LONG', nullable: false }, + { name: 'order_id', type: 'SYMBOL', nullable: true }, + { name: 'strategy', type: 'SYMBOL', nullable: true }, + { name: 'commission', type: 'DOUBLE', nullable: true } + ], + partitionBy: 'DAY', + orderBy: ['symbol', 'timestamp'], + indices: [ + { columns: ['symbol'], type: 'HASH' }, + { columns: ['order_id'], type: 'HASH' }, + { columns: ['strategy'], type: 'HASH' } + ] + }); + + // Performance Metrics Table + this.schemas.set('performance_metrics', { + tableName: 'performance_metrics', + columns: [ + { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, + { name: 'operation', type: 
'SYMBOL', nullable: false }, + { name: 'response_time', type: 'LONG', nullable: false }, + { name: 'success', type: 'BOOLEAN', nullable: false }, + { name: 'error_code', type: 'SYMBOL', nullable: true }, + { name: 'component', type: 'SYMBOL', nullable: true } + ], + partitionBy: 'HOUR', + orderBy: ['operation', 'timestamp'], + indices: [ + { columns: ['operation'], type: 'HASH' }, + { columns: ['success'], type: 'HASH' } + ] + }); + + // Portfolio Positions Table + this.schemas.set('portfolio_positions', { + tableName: 'portfolio_positions', + columns: [ + { name: 'portfolio_id', type: 'SYMBOL', nullable: false }, + { name: 'symbol', type: 'SYMBOL', nullable: false }, + { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, + { name: 'quantity', type: 'DOUBLE', nullable: false }, + { name: 'avg_cost', type: 'DOUBLE', nullable: false }, + { name: 'market_value', type: 'DOUBLE', nullable: false }, + { name: 'unrealized_pnl', type: 'DOUBLE', nullable: false }, + { name: 'realized_pnl', type: 'DOUBLE', nullable: false } + ], + partitionBy: 'DAY', + orderBy: ['portfolio_id', 'symbol', 'timestamp'], + indices: [ + { columns: ['portfolio_id'], type: 'HASH' }, + { columns: ['symbol'], type: 'HASH' } + ] + }); + + // Risk Metrics Table + this.schemas.set('risk_metrics', { + tableName: 'risk_metrics', + columns: [ + { name: 'portfolio_id', type: 'SYMBOL', nullable: false }, + { name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true }, + { name: 'var_1d', type: 'DOUBLE', nullable: true }, + { name: 'var_5d', type: 'DOUBLE', nullable: true }, + { name: 'expected_shortfall', type: 'DOUBLE', nullable: true }, + { name: 'beta', type: 'DOUBLE', nullable: true }, + { name: 'sharpe_ratio', type: 'DOUBLE', nullable: true }, + { name: 'max_drawdown', type: 'DOUBLE', nullable: true }, + { name: 'volatility', type: 'DOUBLE', nullable: true } + ], + partitionBy: 'DAY', + orderBy: ['portfolio_id', 'timestamp'], + indices: [ + { columns: 
['portfolio_id'], type: 'HASH' } + ] + }); + } + + /** + * Create all tables + */ + public async createAllTables(): Promise { + this.logger.info('Creating all QuestDB tables'); + + for (const [tableName, schema] of this.schemas) { + try { + await this.createTable(schema); + this.logger.info(`Table ${tableName} created successfully`); + } catch (error) { + this.logger.error(`Failed to create table ${tableName}`, error); + throw error; + } + } + } + + /** + * Create a single table + */ + public async createTable(schema: TableSchema): Promise { + const sql = this.buildCreateTableSQL(schema); + + try { + await this.client.query(sql); + this.logger.info(`Table ${schema.tableName} created`, { sql }); + } catch (error) { + // Check if table already exists + if (error instanceof Error && error.message.includes('already exists')) { + this.logger.info(`Table ${schema.tableName} already exists`); + return; + } + throw error; + } + } + + /** + * Drop a table + */ + public async dropTable(tableName: string): Promise { + const sql = `DROP TABLE IF EXISTS ${tableName}`; + + try { + await this.client.query(sql); + this.logger.info(`Table ${tableName} dropped`); + } catch (error) { + this.logger.error(`Failed to drop table ${tableName}`, error); + throw error; + } + } + + /** + * Check if table exists + */ + public async tableExists(tableName: string): Promise { + try { + const result = await this.client.query(` + SELECT COUNT(*) as count + FROM information_schema.tables + WHERE table_name = '${tableName}' + `); + + return result.rows.length > 0 && result.rows[0].count > 0; + } catch (error) { + this.logger.error(`Error checking if table exists: ${tableName}`, error); + return false; + } + } + + /** + * Get table schema + */ + public getSchema(tableName: string): TableSchema | undefined { + return this.schemas.get(tableName); + } + + /** + * Add custom schema + */ + public addSchema(schema: TableSchema): void { + this.schemas.set(schema.tableName, schema); + 
this.logger.info(`Schema added for table: ${schema.tableName}`); + } + + /** + * Get all schema names + */ + public getSchemaNames(): string[] { + return Array.from(this.schemas.keys()); + } + + /** + * Optimize table (rebuild indices, etc.) + */ + public async optimizeTable(tableName: string): Promise { + const schema = this.schemas.get(tableName); + if (!schema) { + throw new Error(`Schema not found for table: ${tableName}`); + } + + // QuestDB automatically optimizes, but we can analyze table stats + try { + const stats = await this.getTableStats(tableName); + this.logger.info(`Table ${tableName} stats`, stats); + } catch (error) { + this.logger.error(`Failed to optimize table ${tableName}`, error); + throw error; + } + } + + /** + * Get table statistics + */ + public async getTableStats(tableName: string): Promise { + try { + const result = await this.client.query(` + SELECT + COUNT(*) as row_count, + MIN(timestamp) as min_timestamp, + MAX(timestamp) as max_timestamp + FROM ${tableName} + `); + + return result.rows[0] || {}; + } catch (error) { + this.logger.error(`Failed to get table stats for ${tableName}`, error); + throw error; + } + } + + /** + * Truncate table (remove all data but keep structure) + */ + public async truncateTable(tableName: string): Promise { + try { + await this.client.query(`TRUNCATE TABLE ${tableName}`); + this.logger.info(`Table ${tableName} truncated`); + } catch (error) { + this.logger.error(`Failed to truncate table ${tableName}`, error); + throw error; + } + } + + /** + * Create table partitions for future dates + */ + public async createPartitions(tableName: string, days: number = 30): Promise { + // QuestDB handles partitioning automatically based on the PARTITION BY clause + // This method is for future extensibility + this.logger.info(`Partitioning is automatic for table ${tableName}`); + } + + /** + * Build CREATE TABLE SQL statement + */ + private buildCreateTableSQL(schema: TableSchema): string { + const columns = 
schema.columns.map(col => { + let columnDef = `${col.name} ${col.type}`; + + if (!col.nullable) { + columnDef += ' NOT NULL'; + } + + return columnDef; + }).join(', '); + + let sql = `CREATE TABLE IF NOT EXISTS ${schema.tableName} (${columns})`; + + // Add designated timestamp + const timestampColumn = schema.columns.find(col => col.designated); + if (timestampColumn) { + sql += ` timestamp(${timestampColumn.name})`; + } + + // Add partition by + if (schema.partitionBy) { + sql += ` PARTITION BY ${schema.partitionBy}`; + } + + return sql; + } + + /** + * Build index creation SQL (for future use) + */ + private buildCreateIndexSQL(tableName: string, index: IndexDefinition): string { + const indexName = `idx_${tableName}_${index.columns.join('_')}`; + const columns = index.columns.join(', '); + + // QuestDB uses different index syntax, this is for future compatibility + return `CREATE INDEX ${indexName} ON ${tableName} (${columns})`; + } + + /** + * Validate schema definition + */ + private validateSchema(schema: TableSchema): void { + if (!schema.tableName) { + throw new Error('Table name is required'); + } + + if (!schema.columns || schema.columns.length === 0) { + throw new Error('At least one column is required'); + } + + const timestampColumns = schema.columns.filter(col => col.designated); + if (timestampColumns.length > 1) { + throw new Error('Only one designated timestamp column is allowed'); + } + + if (timestampColumns.length === 0) { + throw new Error('A designated timestamp column is required for time-series tables'); + } + } + + /** + * Get table creation status + */ + public async getTableCreationStatus(): Promise> { + const status: Record = {}; + + for (const tableName of this.schemas.keys()) { + status[tableName] = await this.tableExists(tableName); + } + + return status; + } + + /** + * Initialize database schema + */ + public async initializeDatabase(): Promise { + this.logger.info('Initializing QuestDB schema'); + + // Validate all schemas first + 
for (const schema of this.schemas.values()) { + this.validateSchema(schema); + } + + // Create all tables + await this.createAllTables(); + + // Get creation status + const status = await this.getTableCreationStatus(); + this.logger.info('Database initialization complete', { tableStatus: status }); + } +} diff --git a/libs/questdb-client/src/types.ts b/libs/questdb-client/src/types.ts index cb60e93..3bba5c6 100644 --- a/libs/questdb-client/src/types.ts +++ b/libs/questdb-client/src/types.ts @@ -1,284 +1,284 @@ -/** - * QuestDB Client Configuration and Types - */ - -/** - * QuestDB Client Configuration - */ -export interface QuestDBClientConfig { - host: string; - httpPort: number; - pgPort: number; - influxPort: number; - user?: string; - password?: string; - database?: string; - tls?: { - enabled: boolean; - verifyServerCert: boolean; - }; - timeouts?: { - connection: number; - request: number; - }; - retryAttempts?: number; -} - -/** - * QuestDB Connection Options - */ -export interface QuestDBConnectionOptions { - protocol?: 'http' | 'pg' | 'influx'; - retryAttempts?: number; - retryDelay?: number; - healthCheckInterval?: number; -} - -/** - * Health Status Types - */ -export type QuestDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy'; - -export interface QuestDBHealthCheck { - status: QuestDBHealthStatus; - timestamp: Date; - latency: number; - protocols: { - http: boolean; - pg: boolean; - influx: boolean; - }; - errors?: string[]; -} - -export interface QuestDBMetrics { - queriesPerSecond: number; - insertsPerSecond: number; - averageQueryTime: number; - errorRate: number; - dataIngestionRate: number; - storageSize: number; -} - -/** - * Table Names for Time-Series Data - */ -export type TableNames = - | 'ohlcv' - | 'trades' - | 'quotes' - | 'indicators' - | 'performance' - | 'risk_metrics' - | 'market_events' - | 'strategy_signals' - | 'portfolio_snapshots'; - -/** - * Time-Series Data Types - */ -export interface BaseTimeSeriesData { - timestamp: Date; 
- symbol?: string; -} - -export interface OHLCVData extends BaseTimeSeriesData { - open: number; - high: number; - low: number; - close: number; - volume: number; - timeframe: string; // '1m', '5m', '1h', '1d', etc. - source: string; -} - -export interface TradeData extends BaseTimeSeriesData { - trade_id: string; - price: number; - quantity: number; - side: 'buy' | 'sell'; - exchange: string; - conditions?: string[]; -} - -export interface QuoteData extends BaseTimeSeriesData { - bid_price: number; - bid_size: number; - ask_price: number; - ask_size: number; - exchange: string; - spread: number; -} - -export interface IndicatorData extends BaseTimeSeriesData { - indicator_name: string; - value: number; - parameters?: Record; - timeframe: string; -} - -export interface PerformanceData extends BaseTimeSeriesData { - portfolio_id: string; - total_value: number; - cash_balance: number; - unrealized_pnl: number; - realized_pnl: number; - daily_return: number; - cumulative_return: number; -} - -export interface RiskMetrics extends BaseTimeSeriesData { - portfolio_id?: string; - strategy_id?: string; - metric_name: string; - value: number; - threshold?: number; - status: 'normal' | 'warning' | 'breach'; -} - -/** - * Query Result Types - */ -export interface QueryResult { - rows: T[]; - rowCount: number; - executionTime: number; - metadata?: { - columns: Array<{ - name: string; - type: string; - }>; - }; -} - -export interface InsertResult { - rowsInserted: number; - executionTime: number; - errors?: string[]; -} - -/** - * Schema Definition Types - */ -export interface ColumnDefinition { - name: string; - type: 'SYMBOL' | 'STRING' | 'DOUBLE' | 'FLOAT' | 'LONG' | 'INT' | 'BOOLEAN' | 'TIMESTAMP' | 'DATE' | 'BINARY'; - indexed?: boolean; - capacity?: number; // For SYMBOL type -} - -export interface TableDefinition { - name: string; - columns: ColumnDefinition[]; - partitionBy?: 'NONE' | 'DAY' | 'MONTH' | 'YEAR'; - timestamp?: string; // Column name to use as designated 
timestamp - dedup?: boolean; -} - -/** - * Connection Pool Types - */ -export interface ConnectionPoolConfig { - minConnections: number; - maxConnections: number; - idleTimeout: number; - acquireTimeout: number; -} - -/** - * Health Monitoring Types - */ -export interface HealthStatus { - isHealthy: boolean; - lastCheck: Date; - responseTime: number; - message: string; - error?: Error; - details?: { - pgPool: boolean; - httpEndpoint: boolean; - uptime: number; - }; -} - -export interface PerformanceMetrics { - totalQueries: number; - successfulQueries: number; - failedQueries: number; - averageResponseTime: number; - lastQueryTime: Date | null; - connectionUptime: number; - memoryUsage: number; -} - -/** - * Query Builder Types - */ -export interface TimeSeriesQuery { - table: TableNames | string; - columns?: string[]; - timeRange?: TimeRange; - groupBy?: string[]; - aggregations?: Record; - sampleBy?: string; - latestBy?: string[]; - orderBy?: Array<{ column: string; direction: 'ASC' | 'DESC' }>; - limit?: number; -} - -export interface AggregationQuery { - aggregations: Record; - groupBy?: string[]; - having?: string[]; -} - -export interface TimeRange { - startTime: Date; - endTime: Date; -} - -/** - * InfluxDB Line Protocol Types - */ -export interface InfluxLineData { - measurement: string; - tags: Record; - fields: Record; - timestamp?: Date; -} - -export interface InfluxWriteOptions { - batchSize?: number; - flushInterval?: number; - autoFlush?: boolean; - precision?: 'ns' | 'us' | 'ms' | 's'; - retryAttempts?: number; - retryDelay?: number; -} - -/** - * Schema Management Types - */ -export interface TableSchema { - tableName: string; - columns: ColumnSchema[]; - partitionBy?: 'NONE' | 'HOUR' | 'DAY' | 'MONTH' | 'YEAR'; - orderBy?: string[]; - indices?: IndexDefinition[]; - dedup?: boolean; -} - -export interface ColumnSchema { - name: string; - type: 'SYMBOL' | 'STRING' | 'DOUBLE' | 'FLOAT' | 'LONG' | 'INT' | 'BOOLEAN' | 'TIMESTAMP' | 'DATE' | 'BINARY'; - 
nullable?: boolean; - designated?: boolean; // For designated timestamp column - capacity?: number; // For SYMBOL type - indexed?: boolean; -} - -export interface IndexDefinition { - columns: string[]; - type: 'HASH' | 'BTREE'; - unique?: boolean; -} +/** + * QuestDB Client Configuration and Types + */ + +/** + * QuestDB Client Configuration + */ +export interface QuestDBClientConfig { + host: string; + httpPort: number; + pgPort: number; + influxPort: number; + user?: string; + password?: string; + database?: string; + tls?: { + enabled: boolean; + verifyServerCert: boolean; + }; + timeouts?: { + connection: number; + request: number; + }; + retryAttempts?: number; +} + +/** + * QuestDB Connection Options + */ +export interface QuestDBConnectionOptions { + protocol?: 'http' | 'pg' | 'influx'; + retryAttempts?: number; + retryDelay?: number; + healthCheckInterval?: number; +} + +/** + * Health Status Types + */ +export type QuestDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy'; + +export interface QuestDBHealthCheck { + status: QuestDBHealthStatus; + timestamp: Date; + latency: number; + protocols: { + http: boolean; + pg: boolean; + influx: boolean; + }; + errors?: string[]; +} + +export interface QuestDBMetrics { + queriesPerSecond: number; + insertsPerSecond: number; + averageQueryTime: number; + errorRate: number; + dataIngestionRate: number; + storageSize: number; +} + +/** + * Table Names for Time-Series Data + */ +export type TableNames = + | 'ohlcv' + | 'trades' + | 'quotes' + | 'indicators' + | 'performance' + | 'risk_metrics' + | 'market_events' + | 'strategy_signals' + | 'portfolio_snapshots'; + +/** + * Time-Series Data Types + */ +export interface BaseTimeSeriesData { + timestamp: Date; + symbol?: string; +} + +export interface OHLCVData extends BaseTimeSeriesData { + open: number; + high: number; + low: number; + close: number; + volume: number; + timeframe: string; // '1m', '5m', '1h', '1d', etc. 
+ source: string; +} + +export interface TradeData extends BaseTimeSeriesData { + trade_id: string; + price: number; + quantity: number; + side: 'buy' | 'sell'; + exchange: string; + conditions?: string[]; +} + +export interface QuoteData extends BaseTimeSeriesData { + bid_price: number; + bid_size: number; + ask_price: number; + ask_size: number; + exchange: string; + spread: number; +} + +export interface IndicatorData extends BaseTimeSeriesData { + indicator_name: string; + value: number; + parameters?: Record; + timeframe: string; +} + +export interface PerformanceData extends BaseTimeSeriesData { + portfolio_id: string; + total_value: number; + cash_balance: number; + unrealized_pnl: number; + realized_pnl: number; + daily_return: number; + cumulative_return: number; +} + +export interface RiskMetrics extends BaseTimeSeriesData { + portfolio_id?: string; + strategy_id?: string; + metric_name: string; + value: number; + threshold?: number; + status: 'normal' | 'warning' | 'breach'; +} + +/** + * Query Result Types + */ +export interface QueryResult { + rows: T[]; + rowCount: number; + executionTime: number; + metadata?: { + columns: Array<{ + name: string; + type: string; + }>; + }; +} + +export interface InsertResult { + rowsInserted: number; + executionTime: number; + errors?: string[]; +} + +/** + * Schema Definition Types + */ +export interface ColumnDefinition { + name: string; + type: 'SYMBOL' | 'STRING' | 'DOUBLE' | 'FLOAT' | 'LONG' | 'INT' | 'BOOLEAN' | 'TIMESTAMP' | 'DATE' | 'BINARY'; + indexed?: boolean; + capacity?: number; // For SYMBOL type +} + +export interface TableDefinition { + name: string; + columns: ColumnDefinition[]; + partitionBy?: 'NONE' | 'DAY' | 'MONTH' | 'YEAR'; + timestamp?: string; // Column name to use as designated timestamp + dedup?: boolean; +} + +/** + * Connection Pool Types + */ +export interface ConnectionPoolConfig { + minConnections: number; + maxConnections: number; + idleTimeout: number; + acquireTimeout: number; +} + 
+/** + * Health Monitoring Types + */ +export interface HealthStatus { + isHealthy: boolean; + lastCheck: Date; + responseTime: number; + message: string; + error?: Error; + details?: { + pgPool: boolean; + httpEndpoint: boolean; + uptime: number; + }; +} + +export interface PerformanceMetrics { + totalQueries: number; + successfulQueries: number; + failedQueries: number; + averageResponseTime: number; + lastQueryTime: Date | null; + connectionUptime: number; + memoryUsage: number; +} + +/** + * Query Builder Types + */ +export interface TimeSeriesQuery { + table: TableNames | string; + columns?: string[]; + timeRange?: TimeRange; + groupBy?: string[]; + aggregations?: Record; + sampleBy?: string; + latestBy?: string[]; + orderBy?: Array<{ column: string; direction: 'ASC' | 'DESC' }>; + limit?: number; +} + +export interface AggregationQuery { + aggregations: Record; + groupBy?: string[]; + having?: string[]; +} + +export interface TimeRange { + startTime: Date; + endTime: Date; +} + +/** + * InfluxDB Line Protocol Types + */ +export interface InfluxLineData { + measurement: string; + tags: Record; + fields: Record; + timestamp?: Date; +} + +export interface InfluxWriteOptions { + batchSize?: number; + flushInterval?: number; + autoFlush?: boolean; + precision?: 'ns' | 'us' | 'ms' | 's'; + retryAttempts?: number; + retryDelay?: number; +} + +/** + * Schema Management Types + */ +export interface TableSchema { + tableName: string; + columns: ColumnSchema[]; + partitionBy?: 'NONE' | 'HOUR' | 'DAY' | 'MONTH' | 'YEAR'; + orderBy?: string[]; + indices?: IndexDefinition[]; + dedup?: boolean; +} + +export interface ColumnSchema { + name: string; + type: 'SYMBOL' | 'STRING' | 'DOUBLE' | 'FLOAT' | 'LONG' | 'INT' | 'BOOLEAN' | 'TIMESTAMP' | 'DATE' | 'BINARY'; + nullable?: boolean; + designated?: boolean; // For designated timestamp column + capacity?: number; // For SYMBOL type + indexed?: boolean; +} + +export interface IndexDefinition { + columns: string[]; + type: 'HASH' 
| 'BTREE'; + unique?: boolean; +} diff --git a/libs/questdb-client/test/integration.test.ts b/libs/questdb-client/test/integration.test.ts index 654f3e7..c8ce6e5 100644 --- a/libs/questdb-client/test/integration.test.ts +++ b/libs/questdb-client/test/integration.test.ts @@ -1,239 +1,239 @@ -/** - * QuestDB Client Integration Test - * - * This test validates that all components work together correctly - * without requiring an actual QuestDB instance. - */ - -import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from 'bun:test'; -import { - QuestDBClient, - QuestDBHealthMonitor, - QuestDBQueryBuilder, - QuestDBInfluxWriter, - QuestDBSchemaManager, - createQuestDBClient -} from '../src'; -import { questdbTestHelpers } from './setup'; - -describe('QuestDB Client Integration', () => { - let client: QuestDBClient; beforeEach(() => { - client = new QuestDBClient({ - host: 'localhost', - httpPort: 9000, - pgPort: 8812, - influxPort: 9009, - database: 'questdb', - user: 'admin', - password: 'quest' - }); - }); afterEach(async () => { - if (client && client.connected) { - try { - await client.disconnect(); - } catch (error) { - // Ignore cleanup errors in tests - } - } - }); - - describe('Client Initialization', () => { - it('should create client with factory function', () => { - const factoryClient = createQuestDBClient(); - expect(factoryClient).toBeInstanceOf(QuestDBClient); - }); - - it('should initialize all supporting classes', () => { - expect(client.getHealthMonitor()).toBeInstanceOf(QuestDBHealthMonitor); - expect(client.queryBuilder()).toBeInstanceOf(QuestDBQueryBuilder); - expect(client.getInfluxWriter()).toBeInstanceOf(QuestDBInfluxWriter); - expect(client.getSchemaManager()).toBeInstanceOf(QuestDBSchemaManager); - }); - - it('should handle connection configuration', () => { - expect(client.getHttpUrl()).toBe('http://localhost:9000'); - expect(client.getInfluxUrl()).toBe('http://localhost:9009'); - expect(client.connected).toBe(false); - }); - }); - 
- describe('Query Builder', () => { - it('should build query using query builder', () => { - const query = client.queryBuilder() - .select('symbol', 'close', 'timestamp') - .from('ohlcv') - .whereSymbol('AAPL') - .whereLastHours(24) - .orderBy('timestamp', 'DESC') - .limit(100) - .build(); - - expect(query).toContain('SELECT symbol, close, timestamp'); - expect(query).toContain('FROM ohlcv'); - expect(query).toContain("symbol = 'AAPL'"); - expect(query).toContain('ORDER BY timestamp DESC'); - expect(query).toContain('LIMIT 100'); - expect(questdbTestHelpers.validateQuestDBQuery(query)).toBe(true); - }); - - it('should build time-series specific queries', () => { - const latestQuery = client.queryBuilder() - .select('*') - .from('ohlcv') - .latestBy('symbol') - .build(); - - expect(latestQuery).toContain('LATEST BY symbol'); - expect(questdbTestHelpers.validateQuestDBQuery(latestQuery)).toBe(true); - - const sampleQuery = client.queryBuilder() - .select('symbol', 'avg(close)') - .from('ohlcv') - .sampleBy('1d') - .build(); - - expect(sampleQuery).toContain('SAMPLE BY 1d'); - expect(questdbTestHelpers.validateQuestDBQuery(sampleQuery)).toBe(true); - }); - - it('should build aggregation queries', () => { - const query = client.aggregate('ohlcv') - .select('symbol', 'avg(close) as avg_price', 'max(high) as max_high') - .whereSymbolIn(['AAPL', 'GOOGL']) - .groupBy('symbol') - .sampleBy('1h') - .build(); - - expect(query).toContain('SELECT symbol, avg(close) as avg_price, max(high) as max_high'); - expect(query).toContain('FROM ohlcv'); - expect(query).toContain("symbol IN ('AAPL', 'GOOGL')"); - expect(query).toContain('SAMPLE BY 1h'); - expect(query).toContain('GROUP BY symbol'); - expect(questdbTestHelpers.validateQuestDBQuery(query)).toBe(true); - }); - }); - describe('InfluxDB Writer', () => { - it('should write OHLCV data using InfluxDB line protocol', async () => { - const ohlcvData = [{ - timestamp: new Date('2024-01-01T12:00:00Z'), - open: 150.00, - high: 152.00, 
- low: 149.50, - close: 151.50, - volume: 1000000 - }]; - - // Mock the actual write operation - const writeSpy = spyOn(client.getInfluxWriter(), 'writeOHLCV'); - writeSpy.mockReturnValue(Promise.resolve()); await expect(async () => { - await client.writeOHLCV('AAPL', 'NASDAQ', ohlcvData); - }).not.toThrow(); - }); - - it('should handle batch operations', () => { - const lines = questdbTestHelpers.generateInfluxDBLines(3); - expect(lines.length).toBe(3); - - lines.forEach(line => { - expect(line).toContain('ohlcv,symbol=TEST'); - expect(line).toMatch(/\d{19}$/); // Nanosecond timestamp - }); }); - }); - - describe('Schema Manager', () => { - it('should provide schema access', () => { - const schema = client.getSchemaManager().getSchema('ohlcv_data'); - - expect(schema).toBeDefined(); - expect(schema?.tableName).toBe('ohlcv_data'); - - const symbolColumn = schema?.columns.find(col => col.name === 'symbol'); - expect(symbolColumn).toBeDefined(); - expect(symbolColumn?.type).toBe('SYMBOL'); - - expect(schema?.partitionBy).toBe('DAY'); }); - }); - - describe('Health Monitor', () => { - it('should provide health monitoring capabilities', async () => { - const healthMonitor = client.getHealthMonitor(); - expect(healthMonitor).toBeInstanceOf(QuestDBHealthMonitor); - - // Mock health status since we're not connected - const mockHealthStatus = { - isHealthy: false, - lastCheck: new Date(), - responseTime: 100, - message: 'Connection not established', - details: { - pgPool: false, - httpEndpoint: false, - uptime: 0 } - }; - - const healthSpy = spyOn(healthMonitor, 'getHealthStatus'); - healthSpy.mockReturnValue(Promise.resolve(mockHealthStatus)); - - const health = await healthMonitor.getHealthStatus(); - expect(health.isHealthy).toBe(false); - expect(health.lastCheck).toBeInstanceOf(Date); - expect(health.message).toBe('Connection not established'); - }); - }); - describe('Time-Series Operations', () => { - it('should support latest by operations', async () => { - // Mock 
the query execution - const mockResult = { - rows: [{ symbol: 'AAPL', close: 150.00, timestamp: new Date() }], - rowCount: 1, - executionTime: 10, - metadata: { columns: [] } - }; - - const querySpy = spyOn(client, 'query'); - querySpy.mockReturnValue(Promise.resolve(mockResult)); - - const result = await client.latestBy('ohlcv', ['symbol', 'close'], 'symbol'); expect(result.rows.length).toBe(1); - expect(result.rows[0].symbol).toBe('AAPL'); - }); - - it('should support sample by operations', async () => { - // Mock the query execution - const mockResult = { - rows: [ - { symbol: 'AAPL', avg_close: 150.00, timestamp: new Date() } - ], - rowCount: 1, - executionTime: 15, - metadata: { columns: [] } - }; - - const querySpy = spyOn(client, 'query'); - querySpy.mockReturnValue(Promise.resolve(mockResult)); const result = await client.sampleBy( - 'ohlcv', - ['symbol', 'avg(close) as avg_close'], - '1h', - 'timestamp', - "symbol = 'AAPL'" - ); - - expect(result.rows.length).toBe(1); - expect(result.executionTime).toBe(15); - }); - }); - - describe('Connection Management', () => { - it('should handle connection configuration', () => { - expect(client.getHttpUrl()).toBe('http://localhost:9000'); - expect(client.getInfluxUrl()).toBe('http://localhost:9009'); - expect(client.connected).toBe(false); - }); - - it('should provide configuration access', () => { - const config = client.configuration; - expect(config.host).toBe('localhost'); - expect(config.httpPort).toBe(9000); - expect(config.user).toBe('admin'); - }); - }); -}); +/** + * QuestDB Client Integration Test + * + * This test validates that all components work together correctly + * without requiring an actual QuestDB instance. 
+ */ + +import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from 'bun:test'; +import { + QuestDBClient, + QuestDBHealthMonitor, + QuestDBQueryBuilder, + QuestDBInfluxWriter, + QuestDBSchemaManager, + createQuestDBClient +} from '../src'; +import { questdbTestHelpers } from './setup'; + +describe('QuestDB Client Integration', () => { + let client: QuestDBClient; beforeEach(() => { + client = new QuestDBClient({ + host: 'localhost', + httpPort: 9000, + pgPort: 8812, + influxPort: 9009, + database: 'questdb', + user: 'admin', + password: 'quest' + }); + }); afterEach(async () => { + if (client && client.connected) { + try { + await client.disconnect(); + } catch (error) { + // Ignore cleanup errors in tests + } + } + }); + + describe('Client Initialization', () => { + it('should create client with factory function', () => { + const factoryClient = createQuestDBClient(); + expect(factoryClient).toBeInstanceOf(QuestDBClient); + }); + + it('should initialize all supporting classes', () => { + expect(client.getHealthMonitor()).toBeInstanceOf(QuestDBHealthMonitor); + expect(client.queryBuilder()).toBeInstanceOf(QuestDBQueryBuilder); + expect(client.getInfluxWriter()).toBeInstanceOf(QuestDBInfluxWriter); + expect(client.getSchemaManager()).toBeInstanceOf(QuestDBSchemaManager); + }); + + it('should handle connection configuration', () => { + expect(client.getHttpUrl()).toBe('http://localhost:9000'); + expect(client.getInfluxUrl()).toBe('http://localhost:9009'); + expect(client.connected).toBe(false); + }); + }); + + describe('Query Builder', () => { + it('should build query using query builder', () => { + const query = client.queryBuilder() + .select('symbol', 'close', 'timestamp') + .from('ohlcv') + .whereSymbol('AAPL') + .whereLastHours(24) + .orderBy('timestamp', 'DESC') + .limit(100) + .build(); + + expect(query).toContain('SELECT symbol, close, timestamp'); + expect(query).toContain('FROM ohlcv'); + expect(query).toContain("symbol = 'AAPL'"); + 
expect(query).toContain('ORDER BY timestamp DESC'); + expect(query).toContain('LIMIT 100'); + expect(questdbTestHelpers.validateQuestDBQuery(query)).toBe(true); + }); + + it('should build time-series specific queries', () => { + const latestQuery = client.queryBuilder() + .select('*') + .from('ohlcv') + .latestBy('symbol') + .build(); + + expect(latestQuery).toContain('LATEST BY symbol'); + expect(questdbTestHelpers.validateQuestDBQuery(latestQuery)).toBe(true); + + const sampleQuery = client.queryBuilder() + .select('symbol', 'avg(close)') + .from('ohlcv') + .sampleBy('1d') + .build(); + + expect(sampleQuery).toContain('SAMPLE BY 1d'); + expect(questdbTestHelpers.validateQuestDBQuery(sampleQuery)).toBe(true); + }); + + it('should build aggregation queries', () => { + const query = client.aggregate('ohlcv') + .select('symbol', 'avg(close) as avg_price', 'max(high) as max_high') + .whereSymbolIn(['AAPL', 'GOOGL']) + .groupBy('symbol') + .sampleBy('1h') + .build(); + + expect(query).toContain('SELECT symbol, avg(close) as avg_price, max(high) as max_high'); + expect(query).toContain('FROM ohlcv'); + expect(query).toContain("symbol IN ('AAPL', 'GOOGL')"); + expect(query).toContain('SAMPLE BY 1h'); + expect(query).toContain('GROUP BY symbol'); + expect(questdbTestHelpers.validateQuestDBQuery(query)).toBe(true); + }); + }); + describe('InfluxDB Writer', () => { + it('should write OHLCV data using InfluxDB line protocol', async () => { + const ohlcvData = [{ + timestamp: new Date('2024-01-01T12:00:00Z'), + open: 150.00, + high: 152.00, + low: 149.50, + close: 151.50, + volume: 1000000 + }]; + + // Mock the actual write operation + const writeSpy = spyOn(client.getInfluxWriter(), 'writeOHLCV'); + writeSpy.mockReturnValue(Promise.resolve()); await expect(async () => { + await client.writeOHLCV('AAPL', 'NASDAQ', ohlcvData); + }).not.toThrow(); + }); + + it('should handle batch operations', () => { + const lines = questdbTestHelpers.generateInfluxDBLines(3); + 
expect(lines.length).toBe(3); + + lines.forEach(line => { + expect(line).toContain('ohlcv,symbol=TEST'); + expect(line).toMatch(/\d{19}$/); // Nanosecond timestamp + }); }); + }); + + describe('Schema Manager', () => { + it('should provide schema access', () => { + const schema = client.getSchemaManager().getSchema('ohlcv_data'); + + expect(schema).toBeDefined(); + expect(schema?.tableName).toBe('ohlcv_data'); + + const symbolColumn = schema?.columns.find(col => col.name === 'symbol'); + expect(symbolColumn).toBeDefined(); + expect(symbolColumn?.type).toBe('SYMBOL'); + + expect(schema?.partitionBy).toBe('DAY'); }); + }); + + describe('Health Monitor', () => { + it('should provide health monitoring capabilities', async () => { + const healthMonitor = client.getHealthMonitor(); + expect(healthMonitor).toBeInstanceOf(QuestDBHealthMonitor); + + // Mock health status since we're not connected + const mockHealthStatus = { + isHealthy: false, + lastCheck: new Date(), + responseTime: 100, + message: 'Connection not established', + details: { + pgPool: false, + httpEndpoint: false, + uptime: 0 } + }; + + const healthSpy = spyOn(healthMonitor, 'getHealthStatus'); + healthSpy.mockReturnValue(Promise.resolve(mockHealthStatus)); + + const health = await healthMonitor.getHealthStatus(); + expect(health.isHealthy).toBe(false); + expect(health.lastCheck).toBeInstanceOf(Date); + expect(health.message).toBe('Connection not established'); + }); + }); + describe('Time-Series Operations', () => { + it('should support latest by operations', async () => { + // Mock the query execution + const mockResult = { + rows: [{ symbol: 'AAPL', close: 150.00, timestamp: new Date() }], + rowCount: 1, + executionTime: 10, + metadata: { columns: [] } + }; + + const querySpy = spyOn(client, 'query'); + querySpy.mockReturnValue(Promise.resolve(mockResult)); + + const result = await client.latestBy('ohlcv', ['symbol', 'close'], 'symbol'); expect(result.rows.length).toBe(1); + 
expect(result.rows[0].symbol).toBe('AAPL'); + }); + + it('should support sample by operations', async () => { + // Mock the query execution + const mockResult = { + rows: [ + { symbol: 'AAPL', avg_close: 150.00, timestamp: new Date() } + ], + rowCount: 1, + executionTime: 15, + metadata: { columns: [] } + }; + + const querySpy = spyOn(client, 'query'); + querySpy.mockReturnValue(Promise.resolve(mockResult)); const result = await client.sampleBy( + 'ohlcv', + ['symbol', 'avg(close) as avg_close'], + '1h', + 'timestamp', + "symbol = 'AAPL'" + ); + + expect(result.rows.length).toBe(1); + expect(result.executionTime).toBe(15); + }); + }); + + describe('Connection Management', () => { + it('should handle connection configuration', () => { + expect(client.getHttpUrl()).toBe('http://localhost:9000'); + expect(client.getInfluxUrl()).toBe('http://localhost:9009'); + expect(client.connected).toBe(false); + }); + + it('should provide configuration access', () => { + const config = client.configuration; + expect(config.host).toBe('localhost'); + expect(config.httpPort).toBe(9000); + expect(config.user).toBe('admin'); + }); + }); +}); diff --git a/libs/questdb-client/test/setup.ts b/libs/questdb-client/test/setup.ts index f1d2683..4e71f1b 100644 --- a/libs/questdb-client/test/setup.ts +++ b/libs/questdb-client/test/setup.ts @@ -1,284 +1,284 @@ -/** - * QuestDB Client Test Setup - * - * Setup file specific to QuestDB client library tests. - * Provides utilities and mocks for testing database operations. 
- */ - -import { newDb } from 'pg-mem'; -import { mock, spyOn, beforeAll, beforeEach } from 'bun:test'; - -// Mock PostgreSQL database for unit tests -let pgMem: any; - -beforeAll(() => { - // Create in-memory PostgreSQL database - pgMem = newDb(); - - // Register QuestDB-specific functions - pgMem.public.registerFunction({ - name: 'now', - implementation: () => new Date().toISOString() - }); - - pgMem.public.registerFunction({ - name: 'dateadd', - args: [{ type: 'text' }, { type: 'int' }, { type: 'timestamp' }], - returns: 'timestamp', - implementation: (unit: string, amount: number, date: Date) => { - const result = new Date(date); - switch (unit) { - case 'd': - case 'day': - result.setDate(result.getDate() + amount); - break; - case 'h': - case 'hour': - result.setHours(result.getHours() + amount); - break; - case 'm': - case 'minute': - result.setMinutes(result.getMinutes() + amount); - break; - default: - throw new Error(`Unsupported date unit: ${unit}`); - } - return result; - } }); // Mock QuestDB HTTP client - // Mock fetch using Bun's built-in mock - (global as any).fetch = mock(() => {}); - - // Mock the logger module to avoid Pino configuration conflicts - mock.module('@stock-bot/logger', () => ({ - Logger: mock(() => ({ - info: mock(() => {}), - warn: mock(() => {}), - error: mock(() => {}), - debug: mock(() => {}), - fatal: mock(() => {}), - trace: mock(() => {}), - child: mock(() => ({ - info: mock(() => {}), - warn: mock(() => {}), - error: mock(() => {}), - debug: mock(() => {}), - fatal: mock(() => {}), - trace: mock(() => {}), - })) - })), - getLogger: mock(() => ({ - info: mock(() => {}), - warn: mock(() => {}), - error: mock(() => {}), - debug: mock(() => {}), - fatal: mock(() => {}), - trace: mock(() => {}), - child: mock(() => ({ - info: mock(() => {}), - warn: mock(() => {}), - error: mock(() => {}), - debug: mock(() => {}), - fatal: mock(() => {}), - trace: mock(() => {}), - })) - })) - })); - - // Mock Pino and its transports to avoid 
configuration conflicts - mock.module('pino', () => ({ - default: mock(() => ({ - info: mock(() => {}), - warn: mock(() => {}), - error: mock(() => {}), - debug: mock(() => {}), - fatal: mock(() => {}), - trace: mock(() => {}), - child: mock(() => ({ - info: mock(() => {}), - warn: mock(() => {}), - error: mock(() => {}), - debug: mock(() => {}), - fatal: mock(() => {}), - trace: mock(() => {}), - })) - })) - })); - - mock.module('pino-pretty', () => ({ - default: mock(() => ({})) - })); - - mock.module('pino-loki', () => ({ - default: mock(() => ({})) - })); -}); - -beforeEach(() => { - // Reset database state - if (pgMem) { - try { - pgMem.public.none('DROP TABLE IF EXISTS ohlcv CASCADE'); - pgMem.public.none('DROP TABLE IF EXISTS trades CASCADE'); - pgMem.public.none('DROP TABLE IF EXISTS quotes CASCADE'); - pgMem.public.none('DROP TABLE IF EXISTS indicators CASCADE'); - pgMem.public.none('DROP TABLE IF EXISTS performance CASCADE'); - pgMem.public.none('DROP TABLE IF EXISTS risk_metrics CASCADE'); - } catch (error) { - // Tables might not exist, ignore errors - } - } // Reset fetch mock - if ((global as any).fetch) { - ((global as any).fetch as any).mockClear?.(); - } -}); - -/** - * QuestDB-specific test utilities - */ -export const questdbTestHelpers = { - /** - * Get mock PostgreSQL adapter - */ - getMockPgAdapter: () => pgMem?.adapters?.createPg?.(), - - /** - * Execute SQL in mock database - */ - executeMockSQL: (sql: string, params?: any[]) => { - return pgMem?.public?.query(sql, params); - }, - /** - * Mock successful QuestDB HTTP response - */ mockQuestDBHttpSuccess: (data: any) => { - ((global as any).fetch as any).mockResolvedValue?.({ - ok: true, - status: 200, - json: async () => data, - text: async () => JSON.stringify(data) - }); - }, - - /** - * Mock QuestDB HTTP error - */ mockQuestDBHttpError: (status: number, message: string) => { - ((global as any).fetch as any).mockResolvedValue?.({ - ok: false, - status, - json: async () => ({ error: message 
}), - text: async () => message - }); - }, - - /** - * Mock InfluxDB line protocol response - */ mockInfluxDBSuccess: () => { - ((global as any).fetch as any).mockResolvedValue?.({ - ok: true, - status: 204, - text: async () => '' - }); - }, - - /** - * Create test OHLCV table - */ - createTestOHLCVTable: () => { - const sql = ` - CREATE TABLE ohlcv ( - symbol VARCHAR(10), - timestamp TIMESTAMP, - open DECIMAL(10,2), - high DECIMAL(10,2), - low DECIMAL(10,2), - close DECIMAL(10,2), - volume BIGINT, - source VARCHAR(50) - ) - `; - return pgMem?.public?.none(sql); - }, - - /** - * Insert test OHLCV data - */ - insertTestOHLCVData: (data: any[]) => { - const sql = ` - INSERT INTO ohlcv (symbol, timestamp, open, high, low, close, volume, source) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8) - `; - - return Promise.all( - data.map(row => - pgMem?.public?.none(sql, [ - row.symbol, - row.timestamp, - row.open, - row.high, - row.low, - row.close, - row.volume, - row.source || 'test' - ]) - ) - ); - }, - - /** - * Generate InfluxDB line protocol test data - */ - generateInfluxDBLines: (count: number = 5) => { - const lines: string[] = []; - const baseTime = Date.now() * 1000000; // Convert to nanoseconds - - for (let i = 0; i < count; i++) { - const time = baseTime + (i * 60000000000); // 1 minute intervals - const price = 150 + Math.random() * 10; - - lines.push( - `ohlcv,symbol=TEST open=${price},high=${price + 1},low=${price - 1},close=${price + 0.5},volume=1000i ${time}` - ); - } - - return lines; - }, - - /** - * Validate QuestDB query syntax - */ - validateQuestDBQuery: (query: string): boolean => { - // Basic validation for QuestDB-specific syntax - const questdbKeywords = [ - 'SAMPLE BY', - 'LATEST BY', - 'ASOF JOIN', - 'SPLICE JOIN', - 'LT JOIN' - ]; - - // Check for valid SQL structure - const hasSelect = /SELECT\s+/i.test(query); - const hasFrom = /FROM\s+/i.test(query); - - return hasSelect && hasFrom; - }, - - /** - * Mock connection pool - */ createMockPool: () => 
{ - const mockQuery = () => Promise.resolve({ rows: [], rowCount: 0 }); - const mockRelease = () => {}; - const mockConnect = () => Promise.resolve({ - query: mockQuery, - release: mockRelease - }); - const mockEnd = () => Promise.resolve(undefined); - - return { - connect: mockConnect, - end: mockEnd, - totalCount: 0, - idleCount: 0, - waitingCount: 0 - }; - } -}; +/** + * QuestDB Client Test Setup + * + * Setup file specific to QuestDB client library tests. + * Provides utilities and mocks for testing database operations. + */ + +import { newDb } from 'pg-mem'; +import { mock, spyOn, beforeAll, beforeEach } from 'bun:test'; + +// Mock PostgreSQL database for unit tests +let pgMem: any; + +beforeAll(() => { + // Create in-memory PostgreSQL database + pgMem = newDb(); + + // Register QuestDB-specific functions + pgMem.public.registerFunction({ + name: 'now', + implementation: () => new Date().toISOString() + }); + + pgMem.public.registerFunction({ + name: 'dateadd', + args: [{ type: 'text' }, { type: 'int' }, { type: 'timestamp' }], + returns: 'timestamp', + implementation: (unit: string, amount: number, date: Date) => { + const result = new Date(date); + switch (unit) { + case 'd': + case 'day': + result.setDate(result.getDate() + amount); + break; + case 'h': + case 'hour': + result.setHours(result.getHours() + amount); + break; + case 'm': + case 'minute': + result.setMinutes(result.getMinutes() + amount); + break; + default: + throw new Error(`Unsupported date unit: ${unit}`); + } + return result; + } }); // Mock QuestDB HTTP client + // Mock fetch using Bun's built-in mock + (global as any).fetch = mock(() => {}); + + // Mock the logger module to avoid Pino configuration conflicts + mock.module('@stock-bot/logger', () => ({ + Logger: mock(() => ({ + info: mock(() => {}), + warn: mock(() => {}), + error: mock(() => {}), + debug: mock(() => {}), + fatal: mock(() => {}), + trace: mock(() => {}), + child: mock(() => ({ + info: mock(() => {}), + warn: mock(() => 
{}), + error: mock(() => {}), + debug: mock(() => {}), + fatal: mock(() => {}), + trace: mock(() => {}), + })) + })), + getLogger: mock(() => ({ + info: mock(() => {}), + warn: mock(() => {}), + error: mock(() => {}), + debug: mock(() => {}), + fatal: mock(() => {}), + trace: mock(() => {}), + child: mock(() => ({ + info: mock(() => {}), + warn: mock(() => {}), + error: mock(() => {}), + debug: mock(() => {}), + fatal: mock(() => {}), + trace: mock(() => {}), + })) + })) + })); + + // Mock Pino and its transports to avoid configuration conflicts + mock.module('pino', () => ({ + default: mock(() => ({ + info: mock(() => {}), + warn: mock(() => {}), + error: mock(() => {}), + debug: mock(() => {}), + fatal: mock(() => {}), + trace: mock(() => {}), + child: mock(() => ({ + info: mock(() => {}), + warn: mock(() => {}), + error: mock(() => {}), + debug: mock(() => {}), + fatal: mock(() => {}), + trace: mock(() => {}), + })) + })) + })); + + mock.module('pino-pretty', () => ({ + default: mock(() => ({})) + })); + + mock.module('pino-loki', () => ({ + default: mock(() => ({})) + })); +}); + +beforeEach(() => { + // Reset database state + if (pgMem) { + try { + pgMem.public.none('DROP TABLE IF EXISTS ohlcv CASCADE'); + pgMem.public.none('DROP TABLE IF EXISTS trades CASCADE'); + pgMem.public.none('DROP TABLE IF EXISTS quotes CASCADE'); + pgMem.public.none('DROP TABLE IF EXISTS indicators CASCADE'); + pgMem.public.none('DROP TABLE IF EXISTS performance CASCADE'); + pgMem.public.none('DROP TABLE IF EXISTS risk_metrics CASCADE'); + } catch (error) { + // Tables might not exist, ignore errors + } + } // Reset fetch mock + if ((global as any).fetch) { + ((global as any).fetch as any).mockClear?.(); + } +}); + +/** + * QuestDB-specific test utilities + */ +export const questdbTestHelpers = { + /** + * Get mock PostgreSQL adapter + */ + getMockPgAdapter: () => pgMem?.adapters?.createPg?.(), + + /** + * Execute SQL in mock database + */ + executeMockSQL: (sql: string, params?: 
any[]) => { + return pgMem?.public?.query(sql, params); + }, + /** + * Mock successful QuestDB HTTP response + */ mockQuestDBHttpSuccess: (data: any) => { + ((global as any).fetch as any).mockResolvedValue?.({ + ok: true, + status: 200, + json: async () => data, + text: async () => JSON.stringify(data) + }); + }, + + /** + * Mock QuestDB HTTP error + */ mockQuestDBHttpError: (status: number, message: string) => { + ((global as any).fetch as any).mockResolvedValue?.({ + ok: false, + status, + json: async () => ({ error: message }), + text: async () => message + }); + }, + + /** + * Mock InfluxDB line protocol response + */ mockInfluxDBSuccess: () => { + ((global as any).fetch as any).mockResolvedValue?.({ + ok: true, + status: 204, + text: async () => '' + }); + }, + + /** + * Create test OHLCV table + */ + createTestOHLCVTable: () => { + const sql = ` + CREATE TABLE ohlcv ( + symbol VARCHAR(10), + timestamp TIMESTAMP, + open DECIMAL(10,2), + high DECIMAL(10,2), + low DECIMAL(10,2), + close DECIMAL(10,2), + volume BIGINT, + source VARCHAR(50) + ) + `; + return pgMem?.public?.none(sql); + }, + + /** + * Insert test OHLCV data + */ + insertTestOHLCVData: (data: any[]) => { + const sql = ` + INSERT INTO ohlcv (symbol, timestamp, open, high, low, close, volume, source) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8) + `; + + return Promise.all( + data.map(row => + pgMem?.public?.none(sql, [ + row.symbol, + row.timestamp, + row.open, + row.high, + row.low, + row.close, + row.volume, + row.source || 'test' + ]) + ) + ); + }, + + /** + * Generate InfluxDB line protocol test data + */ + generateInfluxDBLines: (count: number = 5) => { + const lines: string[] = []; + const baseTime = Date.now() * 1000000; // Convert to nanoseconds + + for (let i = 0; i < count; i++) { + const time = baseTime + (i * 60000000000); // 1 minute intervals + const price = 150 + Math.random() * 10; + + lines.push( + `ohlcv,symbol=TEST open=${price},high=${price + 1},low=${price - 1},close=${price + 
0.5},volume=1000i ${time}` + ); + } + + return lines; + }, + + /** + * Validate QuestDB query syntax + */ + validateQuestDBQuery: (query: string): boolean => { + // Basic validation for QuestDB-specific syntax + const questdbKeywords = [ + 'SAMPLE BY', + 'LATEST BY', + 'ASOF JOIN', + 'SPLICE JOIN', + 'LT JOIN' + ]; + + // Check for valid SQL structure + const hasSelect = /SELECT\s+/i.test(query); + const hasFrom = /FROM\s+/i.test(query); + + return hasSelect && hasFrom; + }, + + /** + * Mock connection pool + */ createMockPool: () => { + const mockQuery = () => Promise.resolve({ rows: [], rowCount: 0 }); + const mockRelease = () => {}; + const mockConnect = () => Promise.resolve({ + query: mockQuery, + release: mockRelease + }); + const mockEnd = () => Promise.resolve(undefined); + + return { + connect: mockConnect, + end: mockEnd, + totalCount: 0, + idleCount: 0, + waitingCount: 0 + }; + } +}; diff --git a/libs/questdb-client/tsconfig.json b/libs/questdb-client/tsconfig.json index 3030b42..e8f78e0 100644 --- a/libs/questdb-client/tsconfig.json +++ b/libs/questdb-client/tsconfig.json @@ -1,13 +1,13 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "references": [ - { "path": "../types" }, - { "path": "../config" }, - { "path": "../logger" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../config" }, + { "path": "../logger" } + ] +} diff --git a/libs/questdb-client/turbo.json b/libs/questdb-client/turbo.json index 792d858..92c4460 100644 --- a/libs/questdb-client/turbo.json +++ b/libs/questdb-client/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build", "@stock-bot/logger#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", 
"package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build", "@stock-bot/logger#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/shutdown/README.md b/libs/shutdown/README.md index aaefde5..f0ecc7d 100644 --- a/libs/shutdown/README.md +++ b/libs/shutdown/README.md @@ -1,202 +1,202 @@ -# @stock-bot/shutdown - -Shutdown management library for Node.js applications in the Stock Bot platform. - -## Features - -- βœ… **Automatic Signal Handling** - SIGTERM, SIGINT, SIGUSR2 (Unix), uncaught exceptions -- βœ… **Platform Support** - Windows and Unix/Linux compatible -- βœ… **Multiple Callbacks** - Register multiple cleanup functions -- βœ… **Timeout Protection** - Configurable shutdown timeout -- βœ… **Error Handling** - Failed callbacks don't block shutdown -- βœ… **TypeScript Support** - Full type definitions -- βœ… **Zero Dependencies** - Lightweight and efficient - -## Installation - -```bash -bun add @stock-bot/shutdown -``` - -## Quick Start - -```typescript -import { onShutdown, setShutdownTimeout } from '@stock-bot/shutdown'; - -// Configure shutdown timeout (optional, default: 30 seconds) -setShutdownTimeout(15000); // 15 seconds - -// Register cleanup callbacks -onShutdown(async () => { - console.log('Closing database connections...'); - await database.close(); -}); - -onShutdown(async () => { - console.log('Stopping background jobs...'); - await jobQueue.stop(); -}); - -onShutdown(() => { - console.log('Final cleanup...'); - // Synchronous cleanup -}); - -console.log('Application started. 
Press Ctrl+C to test graceful shutdown.'); -``` - -## API Reference - -### Convenience Functions - -#### `onShutdown(callback)` -Register a cleanup callback. - -```typescript -onShutdown(async () => { - await cleanup(); -}); -``` - -#### `setShutdownTimeout(timeout)` -Set shutdown timeout in milliseconds. - -```typescript -setShutdownTimeout(30000); // 30 seconds -``` - -#### `initiateShutdown(signal?)` -Manually trigger shutdown. - -```typescript -const result = await initiateShutdown('manual'); -console.log(result.success); // true/false -``` - -#### `shutdownAndExit(signal?, exitCode?)` -Trigger shutdown and exit process. - -```typescript -await shutdownAndExit('manual', 0); -``` - -### Advanced Usage - -#### Manual Instance Management - -```typescript -import { Shutdown } from '@stock-bot/shutdown'; - -const shutdown = new Shutdown({ - timeout: 20000, - autoRegister: true -}); - -shutdown.onShutdown(async () => { - await cleanup(); -}); - -// Manual shutdown -const result = await shutdown.shutdown('manual'); -``` - -#### Configuration Options - -```typescript -interface ShutdownOptions { - timeout?: number; // Timeout in ms (default: 30000) - autoRegister?: boolean; // Auto-register signals (default: true) -} -``` - -#### Shutdown Result - -```typescript -interface ShutdownResult { - success: boolean; - callbacksExecuted: number; - callbacksFailed: number; - duration: number; - error?: string; -} -``` - -## Examples - -### Express Server - -```typescript -import express from 'express'; -import { onShutdown, setShutdownTimeout } from '@stock-bot/shutdown'; - -const app = express(); -const server = app.listen(3000); - -setShutdownTimeout(10000); - -onShutdown(async () => { - console.log('Closing HTTP server...'); - await new Promise(resolve => server.close(resolve)); -}); - -onShutdown(async () => { - console.log('Closing database...'); - await database.close(); -}); -``` - -### Worker Process - -```typescript -import { onShutdown } from '@stock-bot/shutdown'; - 
-let isRunning = true; - -onShutdown(() => { - console.log('Stopping worker...'); - isRunning = false; -}); - -// Worker loop -while (isRunning) { - await processJob(); - await new Promise(resolve => setTimeout(resolve, 1000)); -} -``` - -## Signal Handling - -The library automatically handles these signals: - -- **SIGTERM** - Termination request -- **SIGINT** - Interrupt (Ctrl+C) -- **SIGUSR2** - User-defined signal (Unix only) -- **uncaughtException** - Unhandled exceptions -- **unhandledRejection** - Unhandled promise rejections - -On Windows, only SIGTERM and SIGINT are supported due to platform limitations. - -## Best Practices - -1. **Register callbacks early** in your application startup -2. **Keep callbacks simple** and focused on cleanup -3. **Use appropriate timeouts** based on your cleanup needs -4. **Handle errors gracefully** in callbacks -5. **Test shutdown behavior** in your CI/CD pipeline - -## Testing - -```typescript -import { resetShutdown, onShutdown } from '@stock-bot/shutdown'; - -beforeEach(() => { - resetShutdown(); // Clear previous state -}); - -test('should register shutdown callback', () => { - let cleaned = false; - onShutdown(() => { cleaned = true; }); - - // Test shutdown behavior -}); -``` +# @stock-bot/shutdown + +Shutdown management library for Node.js applications in the Stock Bot platform. 
+ +## Features + +- βœ… **Automatic Signal Handling** - SIGTERM, SIGINT, SIGUSR2 (Unix), uncaught exceptions +- βœ… **Platform Support** - Windows and Unix/Linux compatible +- βœ… **Multiple Callbacks** - Register multiple cleanup functions +- βœ… **Timeout Protection** - Configurable shutdown timeout +- βœ… **Error Handling** - Failed callbacks don't block shutdown +- βœ… **TypeScript Support** - Full type definitions +- βœ… **Zero Dependencies** - Lightweight and efficient + +## Installation + +```bash +bun add @stock-bot/shutdown +``` + +## Quick Start + +```typescript +import { onShutdown, setShutdownTimeout } from '@stock-bot/shutdown'; + +// Configure shutdown timeout (optional, default: 30 seconds) +setShutdownTimeout(15000); // 15 seconds + +// Register cleanup callbacks +onShutdown(async () => { + console.log('Closing database connections...'); + await database.close(); +}); + +onShutdown(async () => { + console.log('Stopping background jobs...'); + await jobQueue.stop(); +}); + +onShutdown(() => { + console.log('Final cleanup...'); + // Synchronous cleanup +}); + +console.log('Application started. Press Ctrl+C to test graceful shutdown.'); +``` + +## API Reference + +### Convenience Functions + +#### `onShutdown(callback)` +Register a cleanup callback. + +```typescript +onShutdown(async () => { + await cleanup(); +}); +``` + +#### `setShutdownTimeout(timeout)` +Set shutdown timeout in milliseconds. + +```typescript +setShutdownTimeout(30000); // 30 seconds +``` + +#### `initiateShutdown(signal?)` +Manually trigger shutdown. + +```typescript +const result = await initiateShutdown('manual'); +console.log(result.success); // true/false +``` + +#### `shutdownAndExit(signal?, exitCode?)` +Trigger shutdown and exit process. 
+ +```typescript +await shutdownAndExit('manual', 0); +``` + +### Advanced Usage + +#### Manual Instance Management + +```typescript +import { Shutdown } from '@stock-bot/shutdown'; + +const shutdown = new Shutdown({ + timeout: 20000, + autoRegister: true +}); + +shutdown.onShutdown(async () => { + await cleanup(); +}); + +// Manual shutdown +const result = await shutdown.shutdown('manual'); +``` + +#### Configuration Options + +```typescript +interface ShutdownOptions { + timeout?: number; // Timeout in ms (default: 30000) + autoRegister?: boolean; // Auto-register signals (default: true) +} +``` + +#### Shutdown Result + +```typescript +interface ShutdownResult { + success: boolean; + callbacksExecuted: number; + callbacksFailed: number; + duration: number; + error?: string; +} +``` + +## Examples + +### Express Server + +```typescript +import express from 'express'; +import { onShutdown, setShutdownTimeout } from '@stock-bot/shutdown'; + +const app = express(); +const server = app.listen(3000); + +setShutdownTimeout(10000); + +onShutdown(async () => { + console.log('Closing HTTP server...'); + await new Promise(resolve => server.close(resolve)); +}); + +onShutdown(async () => { + console.log('Closing database...'); + await database.close(); +}); +``` + +### Worker Process + +```typescript +import { onShutdown } from '@stock-bot/shutdown'; + +let isRunning = true; + +onShutdown(() => { + console.log('Stopping worker...'); + isRunning = false; +}); + +// Worker loop +while (isRunning) { + await processJob(); + await new Promise(resolve => setTimeout(resolve, 1000)); +} +``` + +## Signal Handling + +The library automatically handles these signals: + +- **SIGTERM** - Termination request +- **SIGINT** - Interrupt (Ctrl+C) +- **SIGUSR2** - User-defined signal (Unix only) +- **uncaughtException** - Unhandled exceptions +- **unhandledRejection** - Unhandled promise rejections + +On Windows, only SIGTERM and SIGINT are supported due to platform limitations. 
+ +## Best Practices + +1. **Register callbacks early** in your application startup +2. **Keep callbacks simple** and focused on cleanup +3. **Use appropriate timeouts** based on your cleanup needs +4. **Handle errors gracefully** in callbacks +5. **Test shutdown behavior** in your CI/CD pipeline + +## Testing + +```typescript +import { resetShutdown, onShutdown } from '@stock-bot/shutdown'; + +beforeEach(() => { + resetShutdown(); // Clear previous state +}); + +test('should register shutdown callback', () => { + let cleaned = false; + onShutdown(() => { cleaned = true; }); + + // Test shutdown behavior +}); +``` diff --git a/libs/shutdown/package.json b/libs/shutdown/package.json index c697a17..7218a62 100644 --- a/libs/shutdown/package.json +++ b/libs/shutdown/package.json @@ -1,26 +1,26 @@ -{ - "name": "@stock-bot/shutdown", - "version": "1.0.0", - "description": "Graceful shutdown management for Stock Bot platform", - "type": "module", - "main": "dist/index.js", - "types": "dist/index.d.ts", "scripts": { - "build": "tsc", - "clean": "rm -rf dist", - "test": "bun test" - }, - "dependencies": {}, - "devDependencies": { - "typescript": "^5.0.0", - "@types/node": "^20.0.0" - }, - "exports": { - ".": { - "import": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist" - ] -} +{ + "name": "@stock-bot/shutdown", + "version": "1.0.0", + "description": "Graceful shutdown management for Stock Bot platform", + "type": "module", + "main": "dist/index.js", + "types": "dist/index.d.ts", "scripts": { + "build": "tsc", + "clean": "rm -rf dist", + "test": "bun test" + }, + "dependencies": {}, + "devDependencies": { + "typescript": "^5.0.0", + "@types/node": "^20.0.0" + }, + "exports": { + ".": { + "import": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist" + ] +} diff --git a/libs/shutdown/src/index.ts b/libs/shutdown/src/index.ts index 0ba4e0d..85cf171 100644 --- a/libs/shutdown/src/index.ts +++ 
b/libs/shutdown/src/index.ts @@ -1,79 +1,79 @@ -/** - * @stock-bot/shutdown - Shutdown management library - * - * Main exports for the shutdown library - */ - -// Core shutdown classes and types -export { Shutdown } from './shutdown'; -export type { ShutdownCallback, ShutdownOptions, ShutdownResult } from './types'; - -import { Shutdown } from './shutdown'; -import type { ShutdownResult } from './types'; - -// Global singleton instance -let globalInstance: Shutdown | null = null; - -/** - * Get the global shutdown instance (creates one if it doesn't exist) - */ -function getGlobalInstance(): Shutdown { - if (!globalInstance) { - globalInstance = Shutdown.getInstance(); - } - return globalInstance; -} - -/** - * Convenience functions for global shutdown management - */ - -/** - * Register a cleanup callback that will be executed during shutdown - */ -export function onShutdown(callback: () => Promise | void): void { - getGlobalInstance().onShutdown(callback); -} - -/** - * Set the shutdown timeout in milliseconds - */ -export function setShutdownTimeout(timeout: number): void { - getGlobalInstance().setTimeout(timeout); -} - -/** - * Check if shutdown is currently in progress - */ -export function isShuttingDown(): boolean { - return globalInstance?.isShutdownInProgress() || false; -} - -/** - * Get the number of registered shutdown callbacks - */ -export function getShutdownCallbackCount(): number { - return globalInstance?.getCallbackCount() || 0; -} - -/** - * Manually initiate graceful shutdown - */ -export function initiateShutdown(signal?: string): Promise { - return getGlobalInstance().shutdown(signal); -} - -/** - * Manually initiate graceful shutdown and exit the process - */ -export function shutdownAndExit(signal?: string, exitCode = 0): Promise { - return getGlobalInstance().shutdownAndExit(signal, exitCode); -} - -/** - * Reset the global instance (mainly for testing) - */ -export function resetShutdown(): void { - globalInstance = null; - 
Shutdown.reset(); -} +/** + * @stock-bot/shutdown - Shutdown management library + * + * Main exports for the shutdown library + */ + +// Core shutdown classes and types +export { Shutdown } from './shutdown'; +export type { ShutdownCallback, ShutdownOptions, ShutdownResult } from './types'; + +import { Shutdown } from './shutdown'; +import type { ShutdownResult } from './types'; + +// Global singleton instance +let globalInstance: Shutdown | null = null; + +/** + * Get the global shutdown instance (creates one if it doesn't exist) + */ +function getGlobalInstance(): Shutdown { + if (!globalInstance) { + globalInstance = Shutdown.getInstance(); + } + return globalInstance; +} + +/** + * Convenience functions for global shutdown management + */ + +/** + * Register a cleanup callback that will be executed during shutdown + */ +export function onShutdown(callback: () => Promise | void): void { + getGlobalInstance().onShutdown(callback); +} + +/** + * Set the shutdown timeout in milliseconds + */ +export function setShutdownTimeout(timeout: number): void { + getGlobalInstance().setTimeout(timeout); +} + +/** + * Check if shutdown is currently in progress + */ +export function isShuttingDown(): boolean { + return globalInstance?.isShutdownInProgress() || false; +} + +/** + * Get the number of registered shutdown callbacks + */ +export function getShutdownCallbackCount(): number { + return globalInstance?.getCallbackCount() || 0; +} + +/** + * Manually initiate graceful shutdown + */ +export function initiateShutdown(signal?: string): Promise { + return getGlobalInstance().shutdown(signal); +} + +/** + * Manually initiate graceful shutdown and exit the process + */ +export function shutdownAndExit(signal?: string, exitCode = 0): Promise { + return getGlobalInstance().shutdownAndExit(signal, exitCode); +} + +/** + * Reset the global instance (mainly for testing) + */ +export function resetShutdown(): void { + globalInstance = null; + Shutdown.reset(); +} diff --git 
a/libs/shutdown/src/shutdown.ts b/libs/shutdown/src/shutdown.ts index 77e6659..97e6b23 100644 --- a/libs/shutdown/src/shutdown.ts +++ b/libs/shutdown/src/shutdown.ts @@ -1,198 +1,198 @@ -/** - * Shutdown management for Node.js applications - * - * Features: - * - Automatic signal handling (SIGTERM, SIGINT, etc.) - * - Configurable shutdown timeout - * - Multiple cleanup callbacks with error handling - * - Platform-specific signal support (Windows/Unix) - */ - -import type { ShutdownCallback, ShutdownOptions, ShutdownResult } from './types'; - -export class Shutdown { - private static instance: Shutdown | null = null; - private isShuttingDown = false; - private shutdownTimeout = 30000; // 30 seconds default - private callbacks: ShutdownCallback[] = []; - private signalHandlersRegistered = false; - - constructor(options: ShutdownOptions = {}) { - this.shutdownTimeout = options.timeout || 30000; - - if (options.autoRegister !== false) { - this.setupSignalHandlers(); - } - } - - /** - * Get or create singleton instance - */ - static getInstance(options?: ShutdownOptions): Shutdown { - if (!Shutdown.instance) { - Shutdown.instance = new Shutdown(options); - } - return Shutdown.instance; - } - - /** - * Reset singleton instance (mainly for testing) - */ - static reset(): void { - Shutdown.instance = null; - } - - /** - * Register a cleanup callback - */ - onShutdown(callback: ShutdownCallback): void { - if (this.isShuttingDown) { - return; - } - this.callbacks.push(callback); - } - - /** - * Set shutdown timeout in milliseconds - */ - setTimeout(timeout: number): void { - if (timeout <= 0) { - throw new Error('Shutdown timeout must be positive'); - } - this.shutdownTimeout = timeout; - } - - /** - * Get current shutdown state - */ - isShutdownInProgress(): boolean { - return this.isShuttingDown; - } - - /** - * Get number of registered callbacks - */ - getCallbackCount(): number { - return this.callbacks.length; - } - - /** - * Initiate graceful shutdown - */ - async 
shutdown(signal?: string): Promise { - if (this.isShuttingDown) { - return { - success: false, - callbacksExecuted: 0, - callbacksFailed: 0, - duration: 0, - error: 'Shutdown already in progress' - }; - } - - this.isShuttingDown = true; - const startTime = Date.now(); - - const shutdownPromise = this.executeCallbacks(); - const timeoutPromise = new Promise((_, reject) => { - setTimeout(() => reject(new Error('Shutdown timeout')), this.shutdownTimeout); - }); - - let result: ShutdownResult; - - try { - const callbackResult = await Promise.race([shutdownPromise, timeoutPromise]); - const duration = Date.now() - startTime; - - result = { - success: true, - callbacksExecuted: callbackResult.executed, - callbacksFailed: callbackResult.failed, - duration, - error: callbackResult.failed > 0 ? `${callbackResult.failed} callbacks failed` : undefined - }; - } catch (error) { - const duration = Date.now() - startTime; - const errorMessage = error instanceof Error ? error.message : String(error); - - result = { - success: false, - callbacksExecuted: 0, - callbacksFailed: 0, - duration, - error: errorMessage - }; - } - - // Don't call process.exit here - let the caller decide - return result; - } - - /** - * Initiate shutdown and exit process - */ - async shutdownAndExit(signal?: string, exitCode = 0): Promise { - const result = await this.shutdown(signal); - const finalExitCode = result.success ? 
exitCode : 1; - - process.exit(finalExitCode); - } - - /** - * Execute all registered callbacks - */ - private async executeCallbacks(): Promise<{ executed: number; failed: number }> { - if (this.callbacks.length === 0) { - return { executed: 0, failed: 0 }; - } - - const results = await Promise.allSettled( - this.callbacks.map(async (callback) => { - await callback(); - }) - ); - - const failed = results.filter(result => result.status === 'rejected').length; - const executed = results.length; - - return { executed, failed }; - } - - /** - * Setup signal handlers for graceful shutdown - */ - private setupSignalHandlers(): void { - if (this.signalHandlersRegistered) { - return; - } - - // Platform-specific signals - const signals: NodeJS.Signals[] = process.platform === 'win32' - ? ['SIGINT', 'SIGTERM'] - : ['SIGTERM', 'SIGINT', 'SIGUSR2']; - - signals.forEach(signal => { - process.on(signal, () => { - this.shutdownAndExit(signal).catch(() => { - process.exit(1); - }); - }); - }); - - // Handle uncaught exceptions - process.on('uncaughtException', () => { - this.shutdownAndExit('uncaughtException', 1).catch(() => { - process.exit(1); - }); - }); - - // Handle unhandled promise rejections - process.on('unhandledRejection', () => { - this.shutdownAndExit('unhandledRejection', 1).catch(() => { - process.exit(1); - }); - }); - - this.signalHandlersRegistered = true; - } -} +/** + * Shutdown management for Node.js applications + * + * Features: + * - Automatic signal handling (SIGTERM, SIGINT, etc.) 
+ * - Configurable shutdown timeout + * - Multiple cleanup callbacks with error handling + * - Platform-specific signal support (Windows/Unix) + */ + +import type { ShutdownCallback, ShutdownOptions, ShutdownResult } from './types'; + +export class Shutdown { + private static instance: Shutdown | null = null; + private isShuttingDown = false; + private shutdownTimeout = 30000; // 30 seconds default + private callbacks: ShutdownCallback[] = []; + private signalHandlersRegistered = false; + + constructor(options: ShutdownOptions = {}) { + this.shutdownTimeout = options.timeout || 30000; + + if (options.autoRegister !== false) { + this.setupSignalHandlers(); + } + } + + /** + * Get or create singleton instance + */ + static getInstance(options?: ShutdownOptions): Shutdown { + if (!Shutdown.instance) { + Shutdown.instance = new Shutdown(options); + } + return Shutdown.instance; + } + + /** + * Reset singleton instance (mainly for testing) + */ + static reset(): void { + Shutdown.instance = null; + } + + /** + * Register a cleanup callback + */ + onShutdown(callback: ShutdownCallback): void { + if (this.isShuttingDown) { + return; + } + this.callbacks.push(callback); + } + + /** + * Set shutdown timeout in milliseconds + */ + setTimeout(timeout: number): void { + if (timeout <= 0) { + throw new Error('Shutdown timeout must be positive'); + } + this.shutdownTimeout = timeout; + } + + /** + * Get current shutdown state + */ + isShutdownInProgress(): boolean { + return this.isShuttingDown; + } + + /** + * Get number of registered callbacks + */ + getCallbackCount(): number { + return this.callbacks.length; + } + + /** + * Initiate graceful shutdown + */ + async shutdown(signal?: string): Promise { + if (this.isShuttingDown) { + return { + success: false, + callbacksExecuted: 0, + callbacksFailed: 0, + duration: 0, + error: 'Shutdown already in progress' + }; + } + + this.isShuttingDown = true; + const startTime = Date.now(); + + const shutdownPromise = 
this.executeCallbacks(); + const timeoutPromise = new Promise((_, reject) => { + setTimeout(() => reject(new Error('Shutdown timeout')), this.shutdownTimeout); + }); + + let result: ShutdownResult; + + try { + const callbackResult = await Promise.race([shutdownPromise, timeoutPromise]); + const duration = Date.now() - startTime; + + result = { + success: true, + callbacksExecuted: callbackResult.executed, + callbacksFailed: callbackResult.failed, + duration, + error: callbackResult.failed > 0 ? `${callbackResult.failed} callbacks failed` : undefined + }; + } catch (error) { + const duration = Date.now() - startTime; + const errorMessage = error instanceof Error ? error.message : String(error); + + result = { + success: false, + callbacksExecuted: 0, + callbacksFailed: 0, + duration, + error: errorMessage + }; + } + + // Don't call process.exit here - let the caller decide + return result; + } + + /** + * Initiate shutdown and exit process + */ + async shutdownAndExit(signal?: string, exitCode = 0): Promise { + const result = await this.shutdown(signal); + const finalExitCode = result.success ? exitCode : 1; + + process.exit(finalExitCode); + } + + /** + * Execute all registered callbacks + */ + private async executeCallbacks(): Promise<{ executed: number; failed: number }> { + if (this.callbacks.length === 0) { + return { executed: 0, failed: 0 }; + } + + const results = await Promise.allSettled( + this.callbacks.map(async (callback) => { + await callback(); + }) + ); + + const failed = results.filter(result => result.status === 'rejected').length; + const executed = results.length; + + return { executed, failed }; + } + + /** + * Setup signal handlers for graceful shutdown + */ + private setupSignalHandlers(): void { + if (this.signalHandlersRegistered) { + return; + } + + // Platform-specific signals + const signals: NodeJS.Signals[] = process.platform === 'win32' + ? 
['SIGINT', 'SIGTERM'] + : ['SIGTERM', 'SIGINT', 'SIGUSR2']; + + signals.forEach(signal => { + process.on(signal, () => { + this.shutdownAndExit(signal).catch(() => { + process.exit(1); + }); + }); + }); + + // Handle uncaught exceptions + process.on('uncaughtException', () => { + this.shutdownAndExit('uncaughtException', 1).catch(() => { + process.exit(1); + }); + }); + + // Handle unhandled promise rejections + process.on('unhandledRejection', () => { + this.shutdownAndExit('unhandledRejection', 1).catch(() => { + process.exit(1); + }); + }); + + this.signalHandlersRegistered = true; + } +} diff --git a/libs/shutdown/src/types.ts b/libs/shutdown/src/types.ts index 04e6457..a3d0d57 100644 --- a/libs/shutdown/src/types.ts +++ b/libs/shutdown/src/types.ts @@ -1,34 +1,34 @@ -/** - * Types for shutdown functionality - */ - -/** - * Callback function for shutdown cleanup - */ -export type ShutdownCallback = () => Promise | void; - -/** - * Options for configuring shutdown behavior - */ -export interface ShutdownOptions { - /** Timeout in milliseconds before forcing shutdown (default: 30000) */ - timeout?: number; - /** Whether to automatically register signal handlers (default: true) */ - autoRegister?: boolean; -} - -/** - * Shutdown result information - */ -export interface ShutdownResult { - /** Whether shutdown completed successfully */ - success: boolean; - /** Number of callbacks executed */ - callbacksExecuted: number; - /** Number of callbacks that failed */ - callbacksFailed: number; - /** Time taken for shutdown in milliseconds */ - duration: number; - /** Error message if shutdown failed */ - error?: string; -} +/** + * Types for shutdown functionality + */ + +/** + * Callback function for shutdown cleanup + */ +export type ShutdownCallback = () => Promise | void; + +/** + * Options for configuring shutdown behavior + */ +export interface ShutdownOptions { + /** Timeout in milliseconds before forcing shutdown (default: 30000) */ + timeout?: number; + /** 
Whether to automatically register signal handlers (default: true) */ + autoRegister?: boolean; +} + +/** + * Shutdown result information + */ +export interface ShutdownResult { + /** Whether shutdown completed successfully */ + success: boolean; + /** Number of callbacks executed */ + callbacksExecuted: number; + /** Number of callbacks that failed */ + callbacksFailed: number; + /** Time taken for shutdown in milliseconds */ + duration: number; + /** Error message if shutdown failed */ + error?: string; +} diff --git a/libs/shutdown/tsconfig.json b/libs/shutdown/tsconfig.json index 3030b42..e8f78e0 100644 --- a/libs/shutdown/tsconfig.json +++ b/libs/shutdown/tsconfig.json @@ -1,13 +1,13 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "references": [ - { "path": "../types" }, - { "path": "../config" }, - { "path": "../logger" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../config" }, + { "path": "../logger" } + ] +} diff --git a/libs/shutdown/turbo.json b/libs/shutdown/turbo.json index 7632db9..c630cca 100644 --- a/libs/shutdown/turbo.json +++ b/libs/shutdown/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/strategy-engine/src/index.ts 
b/libs/strategy-engine/src/index.ts index f2d9a37..3ce5021 100644 --- a/libs/strategy-engine/src/index.ts +++ b/libs/strategy-engine/src/index.ts @@ -1,370 +1,370 @@ -import { EventEmitter } from 'eventemitter3'; -import { getLogger } from '@stock-bot/logger'; -import { EventBus } from '@stock-bot/event-bus'; -import { DataFrame } from '@stock-bot/data-frame'; - -// Core types -export interface MarketData { - symbol: string; - timestamp: number; - open: number; - high: number; - low: number; - close: number; - volume: number; - [key: string]: any; -} - -export interface TradingSignal { - type: 'BUY' | 'SELL' | 'HOLD'; - symbol: string; - timestamp: number; - price: number; - quantity: number; - confidence: number; - reason: string; - metadata?: Record; -} - -export interface StrategyContext { - symbol: string; - timeframe: string; - data: DataFrame; - indicators: Record; - position?: Position; - portfolio: PortfolioSummary; - timestamp: number; -} - -export interface Position { - symbol: string; - quantity: number; - averagePrice: number; - currentPrice: number; - unrealizedPnL: number; - side: 'LONG' | 'SHORT'; -} - -export interface PortfolioSummary { - totalValue: number; - cash: number; - positions: Position[]; - totalPnL: number; - dayPnL: number; -} - -export interface StrategyConfig { - id: string; - name: string; - description?: string; - symbols: string[]; - timeframes: string[]; - parameters: Record; - riskLimits: RiskLimits; - enabled: boolean; -} - -export interface RiskLimits { - maxPositionSize: number; - maxDailyLoss: number; - maxDrawdown: number; - stopLoss?: number; - takeProfit?: number; -} - -// Abstract base strategy class -export abstract class BaseStrategy extends EventEmitter { - protected logger; - protected eventBus: EventBus; - protected config: StrategyConfig; - protected isActive: boolean = false; - - constructor(config: StrategyConfig, eventBus: EventBus) { - super(); - this.config = config; - this.eventBus = eventBus; - this.logger = 
getLogger(`strategy:${config.id}`); - } - - // Abstract methods that must be implemented by concrete strategies - abstract initialize(): Promise; - abstract onMarketData(context: StrategyContext): Promise; - abstract onSignal(signal: TradingSignal): Promise; - abstract cleanup(): Promise; - - // Optional lifecycle methods - onStart?(): Promise; - onStop?(): Promise; - onError?(error: Error): Promise; - - // Control methods - async start(): Promise { - if (this.isActive) { - this.logger.warn('Strategy already active'); - return; - } - - try { - await this.initialize(); - - if (this.onStart) { - await this.onStart(); - } - - this.isActive = true; - this.logger.info('Strategy started', { strategyId: this.config.id }); - this.emit('started'); - } catch (error) { - this.logger.error('Failed to start strategy', { error, strategyId: this.config.id }); - throw error; - } - } - - async stop(): Promise { - if (!this.isActive) { - this.logger.warn('Strategy not active'); - return; - } - - try { - if (this.onStop) { - await this.onStop(); - } - - await this.cleanup(); - this.isActive = false; - this.logger.info('Strategy stopped', { strategyId: this.config.id }); - this.emit('stopped'); - } catch (error) { - this.logger.error('Failed to stop strategy', { error, strategyId: this.config.id }); - throw error; - } - } - - // Utility methods - protected async emitSignal(signal: TradingSignal): Promise { - await this.eventBus.publish(this.config.id, signal); - this.emit('signal', signal); - this.logger.info('Signal generated', { - signal: signal.type, - symbol: signal.symbol, - confidence: signal.confidence - }); - } - - protected checkRiskLimits(signal: TradingSignal, context: StrategyContext): boolean { - const limits = this.config.riskLimits; - - // Check position size limit - if (signal.quantity > limits.maxPositionSize) { - this.logger.warn('Signal exceeds max position size', { - requested: signal.quantity, - limit: limits.maxPositionSize - }); - return false; - } - - // Check 
daily loss limit - if (context.portfolio.dayPnL <= -limits.maxDailyLoss) { - this.logger.warn('Daily loss limit reached', { - dayPnL: context.portfolio.dayPnL, - limit: -limits.maxDailyLoss - }); - return false; - } - - return true; - } - - // Getters - get id(): string { - return this.config.id; - } - - get name(): string { - return this.config.name; - } - - get active(): boolean { - return this.isActive; - } - - get configuration(): StrategyConfig { - return { ...this.config }; - } -} - -// Strategy execution engine -export class StrategyEngine extends EventEmitter { - private strategies: Map = new Map(); - private logger; - private eventBus: EventBus; - private isRunning: boolean = false; - - constructor(eventBus: EventBus) { - super(); - this.eventBus = eventBus; - this.logger = getLogger('strategy-engine'); - } - - async initialize(): Promise { - // Subscribe to market data events - await this.eventBus.subscribe('market.data', this.handleMarketData.bind(this)); - await this.eventBus.subscribe('order.update', this.handleOrderUpdate.bind(this)); - await this.eventBus.subscribe('portfolio.update', this.handlePortfolioUpdate.bind(this)); - - this.logger.info('Strategy engine initialized'); - } - - async registerStrategy(strategy: BaseStrategy): Promise { - if (this.strategies.has(strategy.id)) { - throw new Error(`Strategy ${strategy.id} already registered`); - } - - this.strategies.set(strategy.id, strategy); - - // Forward strategy events - strategy.on('signal', (signal) => this.emit('signal', signal)); - strategy.on('error', (error) => this.emit('error', error)); - - this.logger.info('Strategy registered', { strategyId: strategy.id }); - } - - async unregisterStrategy(strategyId: string): Promise { - const strategy = this.strategies.get(strategyId); - if (!strategy) { - throw new Error(`Strategy ${strategyId} not found`); - } - - if (strategy.active) { - await strategy.stop(); - } - - strategy.removeAllListeners(); - this.strategies.delete(strategyId); - - 
this.logger.info('Strategy unregistered', { strategyId }); - } - - async startStrategy(strategyId: string): Promise { - const strategy = this.strategies.get(strategyId); - if (!strategy) { - throw new Error(`Strategy ${strategyId} not found`); - } - - await strategy.start(); - } - - async stopStrategy(strategyId: string): Promise { - const strategy = this.strategies.get(strategyId); - if (!strategy) { - throw new Error(`Strategy ${strategyId} not found`); - } - - await strategy.stop(); - } - - async startAll(): Promise { - if (this.isRunning) { - this.logger.warn('Engine already running'); - return; - } - - const startPromises = Array.from(this.strategies.values()) - .filter(strategy => strategy.configuration.enabled) - .map(strategy => strategy.start()); - - await Promise.all(startPromises); - this.isRunning = true; - this.logger.info('All strategies started'); - this.emit('started'); - } - - async stopAll(): Promise { - if (!this.isRunning) { - this.logger.warn('Engine not running'); - return; - } - - const stopPromises = Array.from(this.strategies.values()) - .filter(strategy => strategy.active) - .map(strategy => strategy.stop()); - - await Promise.all(stopPromises); - this.isRunning = false; - this.logger.info('All strategies stopped'); - this.emit('stopped'); - } - - private async handleMarketData(message: any): Promise { - const { symbol, ...data } = message.data; - - // Find strategies that trade this symbol - const relevantStrategies = Array.from(this.strategies.values()) - .filter(strategy => - strategy.active && - strategy.configuration.symbols.includes(symbol) - ); - - for (const strategy of relevantStrategies) { - try { - // Create context for this strategy - const context: StrategyContext = { - symbol, - timeframe: '1m', // TODO: Get from strategy config - data: new DataFrame([data]), // TODO: Use historical data - indicators: {}, - portfolio: { - totalValue: 100000, // TODO: Get real portfolio data - cash: 50000, - positions: [], - totalPnL: 0, - 
dayPnL: 0 - }, - timestamp: data.timestamp - }; - - const signals = await strategy.onMarketData(context); - - for (const signal of signals) { - await strategy.onSignal(signal); - } - } catch (error) { - this.logger.error('Error processing market data for strategy', { - error, - strategyId: strategy.id, - symbol - }); - } - } - } - - private async handleOrderUpdate(message: any): Promise { - // Handle order updates - notify relevant strategies - this.logger.debug('Order update received', { data: message.data }); - } - - private async handlePortfolioUpdate(message: any): Promise { - // Handle portfolio updates - notify relevant strategies - this.logger.debug('Portfolio update received', { data: message.data }); - } - - getStrategy(strategyId: string): BaseStrategy | undefined { - return this.strategies.get(strategyId); - } - - getStrategies(): BaseStrategy[] { - return Array.from(this.strategies.values()); - } - - getActiveStrategies(): BaseStrategy[] { - return this.getStrategies().filter(strategy => strategy.active); - } - - async shutdown(): Promise { - await this.stopAll(); - this.strategies.clear(); - this.removeAllListeners(); - this.logger.info('Strategy engine shutdown'); - } +import { EventEmitter } from 'eventemitter3'; +import { getLogger } from '@stock-bot/logger'; +import { EventBus } from '@stock-bot/event-bus'; +import { DataFrame } from '@stock-bot/data-frame'; + +// Core types +export interface MarketData { + symbol: string; + timestamp: number; + open: number; + high: number; + low: number; + close: number; + volume: number; + [key: string]: any; +} + +export interface TradingSignal { + type: 'BUY' | 'SELL' | 'HOLD'; + symbol: string; + timestamp: number; + price: number; + quantity: number; + confidence: number; + reason: string; + metadata?: Record; +} + +export interface StrategyContext { + symbol: string; + timeframe: string; + data: DataFrame; + indicators: Record; + position?: Position; + portfolio: PortfolioSummary; + timestamp: number; +} + 
+export interface Position { + symbol: string; + quantity: number; + averagePrice: number; + currentPrice: number; + unrealizedPnL: number; + side: 'LONG' | 'SHORT'; +} + +export interface PortfolioSummary { + totalValue: number; + cash: number; + positions: Position[]; + totalPnL: number; + dayPnL: number; +} + +export interface StrategyConfig { + id: string; + name: string; + description?: string; + symbols: string[]; + timeframes: string[]; + parameters: Record; + riskLimits: RiskLimits; + enabled: boolean; +} + +export interface RiskLimits { + maxPositionSize: number; + maxDailyLoss: number; + maxDrawdown: number; + stopLoss?: number; + takeProfit?: number; +} + +// Abstract base strategy class +export abstract class BaseStrategy extends EventEmitter { + protected logger; + protected eventBus: EventBus; + protected config: StrategyConfig; + protected isActive: boolean = false; + + constructor(config: StrategyConfig, eventBus: EventBus) { + super(); + this.config = config; + this.eventBus = eventBus; + this.logger = getLogger(`strategy:${config.id}`); + } + + // Abstract methods that must be implemented by concrete strategies + abstract initialize(): Promise; + abstract onMarketData(context: StrategyContext): Promise; + abstract onSignal(signal: TradingSignal): Promise; + abstract cleanup(): Promise; + + // Optional lifecycle methods + onStart?(): Promise; + onStop?(): Promise; + onError?(error: Error): Promise; + + // Control methods + async start(): Promise { + if (this.isActive) { + this.logger.warn('Strategy already active'); + return; + } + + try { + await this.initialize(); + + if (this.onStart) { + await this.onStart(); + } + + this.isActive = true; + this.logger.info('Strategy started', { strategyId: this.config.id }); + this.emit('started'); + } catch (error) { + this.logger.error('Failed to start strategy', { error, strategyId: this.config.id }); + throw error; + } + } + + async stop(): Promise { + if (!this.isActive) { + this.logger.warn('Strategy not 
active'); + return; + } + + try { + if (this.onStop) { + await this.onStop(); + } + + await this.cleanup(); + this.isActive = false; + this.logger.info('Strategy stopped', { strategyId: this.config.id }); + this.emit('stopped'); + } catch (error) { + this.logger.error('Failed to stop strategy', { error, strategyId: this.config.id }); + throw error; + } + } + + // Utility methods + protected async emitSignal(signal: TradingSignal): Promise { + await this.eventBus.publish(this.config.id, signal); + this.emit('signal', signal); + this.logger.info('Signal generated', { + signal: signal.type, + symbol: signal.symbol, + confidence: signal.confidence + }); + } + + protected checkRiskLimits(signal: TradingSignal, context: StrategyContext): boolean { + const limits = this.config.riskLimits; + + // Check position size limit + if (signal.quantity > limits.maxPositionSize) { + this.logger.warn('Signal exceeds max position size', { + requested: signal.quantity, + limit: limits.maxPositionSize + }); + return false; + } + + // Check daily loss limit + if (context.portfolio.dayPnL <= -limits.maxDailyLoss) { + this.logger.warn('Daily loss limit reached', { + dayPnL: context.portfolio.dayPnL, + limit: -limits.maxDailyLoss + }); + return false; + } + + return true; + } + + // Getters + get id(): string { + return this.config.id; + } + + get name(): string { + return this.config.name; + } + + get active(): boolean { + return this.isActive; + } + + get configuration(): StrategyConfig { + return { ...this.config }; + } +} + +// Strategy execution engine +export class StrategyEngine extends EventEmitter { + private strategies: Map = new Map(); + private logger; + private eventBus: EventBus; + private isRunning: boolean = false; + + constructor(eventBus: EventBus) { + super(); + this.eventBus = eventBus; + this.logger = getLogger('strategy-engine'); + } + + async initialize(): Promise { + // Subscribe to market data events + await this.eventBus.subscribe('market.data', 
this.handleMarketData.bind(this)); + await this.eventBus.subscribe('order.update', this.handleOrderUpdate.bind(this)); + await this.eventBus.subscribe('portfolio.update', this.handlePortfolioUpdate.bind(this)); + + this.logger.info('Strategy engine initialized'); + } + + async registerStrategy(strategy: BaseStrategy): Promise { + if (this.strategies.has(strategy.id)) { + throw new Error(`Strategy ${strategy.id} already registered`); + } + + this.strategies.set(strategy.id, strategy); + + // Forward strategy events + strategy.on('signal', (signal) => this.emit('signal', signal)); + strategy.on('error', (error) => this.emit('error', error)); + + this.logger.info('Strategy registered', { strategyId: strategy.id }); + } + + async unregisterStrategy(strategyId: string): Promise { + const strategy = this.strategies.get(strategyId); + if (!strategy) { + throw new Error(`Strategy ${strategyId} not found`); + } + + if (strategy.active) { + await strategy.stop(); + } + + strategy.removeAllListeners(); + this.strategies.delete(strategyId); + + this.logger.info('Strategy unregistered', { strategyId }); + } + + async startStrategy(strategyId: string): Promise { + const strategy = this.strategies.get(strategyId); + if (!strategy) { + throw new Error(`Strategy ${strategyId} not found`); + } + + await strategy.start(); + } + + async stopStrategy(strategyId: string): Promise { + const strategy = this.strategies.get(strategyId); + if (!strategy) { + throw new Error(`Strategy ${strategyId} not found`); + } + + await strategy.stop(); + } + + async startAll(): Promise { + if (this.isRunning) { + this.logger.warn('Engine already running'); + return; + } + + const startPromises = Array.from(this.strategies.values()) + .filter(strategy => strategy.configuration.enabled) + .map(strategy => strategy.start()); + + await Promise.all(startPromises); + this.isRunning = true; + this.logger.info('All strategies started'); + this.emit('started'); + } + + async stopAll(): Promise { + if 
(!this.isRunning) { + this.logger.warn('Engine not running'); + return; + } + + const stopPromises = Array.from(this.strategies.values()) + .filter(strategy => strategy.active) + .map(strategy => strategy.stop()); + + await Promise.all(stopPromises); + this.isRunning = false; + this.logger.info('All strategies stopped'); + this.emit('stopped'); + } + + private async handleMarketData(message: any): Promise { + const { symbol, ...data } = message.data; + + // Find strategies that trade this symbol + const relevantStrategies = Array.from(this.strategies.values()) + .filter(strategy => + strategy.active && + strategy.configuration.symbols.includes(symbol) + ); + + for (const strategy of relevantStrategies) { + try { + // Create context for this strategy + const context: StrategyContext = { + symbol, + timeframe: '1m', // TODO: Get from strategy config + data: new DataFrame([data]), // TODO: Use historical data + indicators: {}, + portfolio: { + totalValue: 100000, // TODO: Get real portfolio data + cash: 50000, + positions: [], + totalPnL: 0, + dayPnL: 0 + }, + timestamp: data.timestamp + }; + + const signals = await strategy.onMarketData(context); + + for (const signal of signals) { + await strategy.onSignal(signal); + } + } catch (error) { + this.logger.error('Error processing market data for strategy', { + error, + strategyId: strategy.id, + symbol + }); + } + } + } + + private async handleOrderUpdate(message: any): Promise { + // Handle order updates - notify relevant strategies + this.logger.debug('Order update received', { data: message.data }); + } + + private async handlePortfolioUpdate(message: any): Promise { + // Handle portfolio updates - notify relevant strategies + this.logger.debug('Portfolio update received', { data: message.data }); + } + + getStrategy(strategyId: string): BaseStrategy | undefined { + return this.strategies.get(strategyId); + } + + getStrategies(): BaseStrategy[] { + return Array.from(this.strategies.values()); + } + + 
getActiveStrategies(): BaseStrategy[] { + return this.getStrategies().filter(strategy => strategy.active); + } + + async shutdown(): Promise { + await this.stopAll(); + this.strategies.clear(); + this.removeAllListeners(); + this.logger.info('Strategy engine shutdown'); + } } \ No newline at end of file diff --git a/libs/strategy-engine/tsconfig.json b/libs/strategy-engine/tsconfig.json index f8bf0c2..b6d6746 100644 --- a/libs/strategy-engine/tsconfig.json +++ b/libs/strategy-engine/tsconfig.json @@ -1,15 +1,15 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], "references": [ - { "path": "../types" }, - { "path": "../logger" }, - { "path": "../utils" }, - { "path": "../event-bus" }, - { "path": "../data-frame" }, - { "path": "../vector-engine" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], "references": [ + { "path": "../types" }, + { "path": "../logger" }, + { "path": "../utils" }, + { "path": "../event-bus" }, + { "path": "../data-frame" }, + { "path": "../vector-engine" } + ] +} diff --git a/libs/strategy-engine/turbo.json b/libs/strategy-engine/turbo.json index 9514f5b..2b83f12 100644 --- a/libs/strategy-engine/turbo.json +++ b/libs/strategy-engine/turbo.json @@ -1,17 +1,17 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": [ - "@stock-bot/types#build", - "@stock-bot/config#build", - "@stock-bot/logger#build", - "@stock-bot/utils#build", - "@stock-bot/data-frame#build", - "@stock-bot/event-bus#build" - ], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": [ + "@stock-bot/types#build", + "@stock-bot/config#build", + "@stock-bot/logger#build", + "@stock-bot/utils#build", + 
"@stock-bot/data-frame#build", + "@stock-bot/event-bus#build" + ], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/types/package.json b/libs/types/package.json index 8f5a75b..717f169 100644 --- a/libs/types/package.json +++ b/libs/types/package.json @@ -1,35 +1,35 @@ -{ - "name": "@stock-bot/types", - "version": "1.0.0", - "description": "Shared type definitions for Stock Bot platform", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "type-check": "tsc --noEmit", - "clean": "rimraf dist", - "test": "bun test" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "typescript": "^5.3.0", - "bun-types": "^1.2.15" - }, - "keywords": [ - "types", - "typescript", - "stock-bot" - ], - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ] -} +{ + "name": "@stock-bot/types", + "version": "1.0.0", + "description": "Shared type definitions for Stock Bot platform", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsc", + "type-check": "tsc --noEmit", + "clean": "rimraf dist", + "test": "bun test" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "typescript": "^5.3.0", + "bun-types": "^1.2.15" + }, + "keywords": [ + "types", + "typescript", + "stock-bot" + ], + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist", + "README.md" + ] +} diff --git a/libs/types/src/index.ts b/libs/types/src/index.ts index 1da603f..f2c96e3 100644 --- a/libs/types/src/index.ts +++ b/libs/types/src/index.ts @@ -1 +1 @@ -// Export all types from the events module +// Export all types from the events module diff --git 
a/libs/types/tsconfig.json b/libs/types/tsconfig.json index 442d0da..1c8366f 100644 --- a/libs/types/tsconfig.json +++ b/libs/types/tsconfig.json @@ -1,9 +1,9 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "references": [] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [] +} diff --git a/libs/types/turbo.json b/libs/types/turbo.json index da0894a..74e52b7 100644 --- a/libs/types/turbo.json +++ b/libs/types/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": [], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": [], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/utils/POSITION_SIZING_FIXES.md b/libs/utils/POSITION_SIZING_FIXES.md index ba6e416..7f1cc65 100644 --- a/libs/utils/POSITION_SIZING_FIXES.md +++ b/libs/utils/POSITION_SIZING_FIXES.md @@ -1,97 +1,97 @@ -# Position Sizing Calculations - Fixed Issues Summary - -## Issues Identified and Fixed: - -### 1. **Duplicate Kelly Function** βœ… FIXED -- **Problem**: Two different `kellyPositionSize` functions with conflicting signatures -- **Solution**: Removed the duplicate and kept the version with proper `KellyParams` interface - -### 2. 
**Incorrect Kelly Criterion Formula** βœ… FIXED -- **Problem**: Formula was implemented as `winRate - ((1 - winRate) / winLossRatio)` -- **Solution**: Corrected to `(winRate * winLossRatio - lossRate) / winLossRatio` -- **Mathematical Validation**: Kelly formula is `f = (bp - q) / b` where b = win/loss ratio, p = win rate, q = loss rate - -### 3. **Missing Input Validation** βœ… FIXED -- **Problem**: Functions didn't validate inputs (zero/negative values) -- **Solution**: Added comprehensive input validation to all functions -- **Examples**: - - Check for `accountSize <= 0`, `riskPercentage <= 0` - - Validate `winRate` is between 0 and 1 - - Ensure prices and volatilities are positive - -### 4. **ATR Position Sizing Units Error** βœ… FIXED -- **Problem**: Function returned risk amount instead of shares -- **Solution**: Changed to return `Math.floor(riskAmount / stopDistance)` (shares) - -### 5. **Flawed Monte Carlo Simulation** βœ… FIXED -- **Problem**: Simulation applied returns to entire portfolio instead of position-sized returns -- **Solution**: Rewritten to test different position fractions and optimize based on Sharpe ratio - -### 6. **Redundant Liquidity Calculations** βœ… FIXED -- **Problem**: Unnecessary conversions between shares and dollar values -- **Solution**: Simplified to directly compare `desiredPositionSize` with `maxShares` - -### 7. **Risk Parity Not Using Target Risk** βœ… FIXED -- **Problem**: `targetRisk` parameter was ignored in calculations -- **Solution**: Incorporated target risk into weight calculations: `weight * (targetRisk / asset.volatility)` - -### 8. **Missing Safety Constraints** βœ… FIXED -- **Problem**: No caps on leverage or volatility ratios -- **Solution**: Added reasonable caps: - - Volatility targeting: max 2x leverage - - Volatility adjustment: max 3x leverage - - Kelly fraction: max 25% with safety factor - -### 9. 
**Correlation Risk Calculation Error** βœ… FIXED -- **Problem**: Correlation risk calculation didn't consider relative position sizes -- **Solution**: Weight correlations by relative position sizes for more accurate risk assessment - -### 10. **Integer Share Handling** βœ… FIXED -- **Problem**: Functions returned fractional shares -- **Solution**: Added `Math.floor()` to return whole shares where appropriate - -## Mathematical Validation Examples: - -### Fixed Risk Position Sizing: -``` -Account: $100,000, Risk: 2%, Entry: $100, Stop: $95 -Risk Amount: $100,000 Γ— 0.02 = $2,000 -Risk Per Share: |$100 - $95| = $5 -Position Size: $2,000 Γ· $5 = 400 shares βœ… -``` - -### Kelly Criterion (Corrected): -``` -Win Rate: 60%, Avg Win: $150, Avg Loss: $100 -Win/Loss Ratio: $150 Γ· $100 = 1.5 -Kelly Fraction: (1.5 Γ— 0.6 - 0.4) Γ· 1.5 = 0.333 -With Safety Factor (25%): 0.333 Γ— 0.25 = 0.083 -Position: $100,000 Γ— 0.083 = $8,333 βœ… -``` - -### Volatility Targeting: -``` -Price: $100, Asset Vol: 20%, Target Vol: 10% -Volatility Ratio: 10% Γ· 20% = 0.5 -Position Value: $100,000 Γ— 0.5 = $50,000 -Position Size: $50,000 Γ· $100 = 500 shares βœ… -``` - -## Edge Cases Now Handled: -- βœ… Zero or negative account sizes -- βœ… Equal entry and stop loss prices -- βœ… Zero volatility assets -- βœ… Negative expectancy strategies -- βœ… Extreme correlation values -- βœ… Division by zero scenarios -- βœ… Invalid win rates (≀0 or β‰₯1) - -## Additional Improvements: -- βœ… Consistent return types (whole shares vs. dollar amounts) -- βœ… Proper TypeScript interfaces for all parameters -- βœ… Comprehensive JSDoc documentation -- βœ… Mathematical formulas verified against financial literature -- βœ… Safety factors to prevent over-leveraging -- βœ… Portfolio-level risk management functions - -All position sizing calculations are now mathematically correct, properly validated, and production-ready! 
+# Position Sizing Calculations - Fixed Issues Summary + +## Issues Identified and Fixed: + +### 1. **Duplicate Kelly Function** βœ… FIXED +- **Problem**: Two different `kellyPositionSize` functions with conflicting signatures +- **Solution**: Removed the duplicate and kept the version with proper `KellyParams` interface + +### 2. **Incorrect Kelly Criterion Formula** βœ… FIXED +- **Problem**: Formula was implemented as `winRate - ((1 - winRate) / winLossRatio)` +- **Solution**: Corrected to `(winRate * winLossRatio - lossRate) / winLossRatio` +- **Mathematical Validation**: Kelly formula is `f = (bp - q) / b` where b = win/loss ratio, p = win rate, q = loss rate + +### 3. **Missing Input Validation** βœ… FIXED +- **Problem**: Functions didn't validate inputs (zero/negative values) +- **Solution**: Added comprehensive input validation to all functions +- **Examples**: + - Check for `accountSize <= 0`, `riskPercentage <= 0` + - Validate `winRate` is between 0 and 1 + - Ensure prices and volatilities are positive + +### 4. **ATR Position Sizing Units Error** βœ… FIXED +- **Problem**: Function returned risk amount instead of shares +- **Solution**: Changed to return `Math.floor(riskAmount / stopDistance)` (shares) + +### 5. **Flawed Monte Carlo Simulation** βœ… FIXED +- **Problem**: Simulation applied returns to entire portfolio instead of position-sized returns +- **Solution**: Rewritten to test different position fractions and optimize based on Sharpe ratio + +### 6. **Redundant Liquidity Calculations** βœ… FIXED +- **Problem**: Unnecessary conversions between shares and dollar values +- **Solution**: Simplified to directly compare `desiredPositionSize` with `maxShares` + +### 7. **Risk Parity Not Using Target Risk** βœ… FIXED +- **Problem**: `targetRisk` parameter was ignored in calculations +- **Solution**: Incorporated target risk into weight calculations: `weight * (targetRisk / asset.volatility)` + +### 8. 
**Missing Safety Constraints** βœ… FIXED +- **Problem**: No caps on leverage or volatility ratios +- **Solution**: Added reasonable caps: + - Volatility targeting: max 2x leverage + - Volatility adjustment: max 3x leverage + - Kelly fraction: max 25% with safety factor + +### 9. **Correlation Risk Calculation Error** βœ… FIXED +- **Problem**: Correlation risk calculation didn't consider relative position sizes +- **Solution**: Weight correlations by relative position sizes for more accurate risk assessment + +### 10. **Integer Share Handling** βœ… FIXED +- **Problem**: Functions returned fractional shares +- **Solution**: Added `Math.floor()` to return whole shares where appropriate + +## Mathematical Validation Examples: + +### Fixed Risk Position Sizing: +``` +Account: $100,000, Risk: 2%, Entry: $100, Stop: $95 +Risk Amount: $100,000 Γ— 0.02 = $2,000 +Risk Per Share: |$100 - $95| = $5 +Position Size: $2,000 Γ· $5 = 400 shares βœ… +``` + +### Kelly Criterion (Corrected): +``` +Win Rate: 60%, Avg Win: $150, Avg Loss: $100 +Win/Loss Ratio: $150 Γ· $100 = 1.5 +Kelly Fraction: (1.5 Γ— 0.6 - 0.4) Γ· 1.5 = 0.333 +With Safety Factor (25%): 0.333 Γ— 0.25 = 0.083 +Position: $100,000 Γ— 0.083 = $8,333 βœ… +``` + +### Volatility Targeting: +``` +Price: $100, Asset Vol: 20%, Target Vol: 10% +Volatility Ratio: 10% Γ· 20% = 0.5 +Position Value: $100,000 Γ— 0.5 = $50,000 +Position Size: $50,000 Γ· $100 = 500 shares βœ… +``` + +## Edge Cases Now Handled: +- βœ… Zero or negative account sizes +- βœ… Equal entry and stop loss prices +- βœ… Zero volatility assets +- βœ… Negative expectancy strategies +- βœ… Extreme correlation values +- βœ… Division by zero scenarios +- βœ… Invalid win rates (≀0 or β‰₯1) + +## Additional Improvements: +- βœ… Consistent return types (whole shares vs. 
dollar amounts) +- βœ… Proper TypeScript interfaces for all parameters +- βœ… Comprehensive JSDoc documentation +- βœ… Mathematical formulas verified against financial literature +- βœ… Safety factors to prevent over-leveraging +- βœ… Portfolio-level risk management functions + +All position sizing calculations are now mathematically correct, properly validated, and production-ready! diff --git a/libs/utils/README.md b/libs/utils/README.md index eebc980..6663910 100644 --- a/libs/utils/README.md +++ b/libs/utils/README.md @@ -1,34 +1,34 @@ -# Utils Library - -Common utility functions shared across services in the stock-bot project. - -## Included Utilities - -### Date Utilities - -Helper functions for working with market dates: - -```typescript -import { dateUtils } from '@stock-bot/utils'; - -// Check if a date is a trading day -const isTradingDay = dateUtils.isTradingDay(new Date()); - -// Get the next trading day -const nextTradingDay = dateUtils.getNextTradingDay(new Date()); -``` - -### Financial Utilities - -Mathematical functions for financial calculations: - -```typescript -import { calculateCAGR, calculateSharpeRatio } from '@stock-bot/utils'; - -// Calculate compound annual growth rate -const returns = [0.05, 0.03, -0.01, 0.04, 0.02]; -const cagr = calculateCAGR(startValue, endValue, years); - -// Calculate Sharpe ratio -const sharpeRatio = calculateSharpeRatio(returns, 0.02); -``` +# Utils Library + +Common utility functions shared across services in the stock-bot project. 
+ +## Included Utilities + +### Date Utilities + +Helper functions for working with market dates: + +```typescript +import { dateUtils } from '@stock-bot/utils'; + +// Check if a date is a trading day +const isTradingDay = dateUtils.isTradingDay(new Date()); + +// Get the next trading day +const nextTradingDay = dateUtils.getNextTradingDay(new Date()); +``` + +### Financial Utilities + +Mathematical functions for financial calculations: + +```typescript +import { calculateCAGR, calculateSharpeRatio } from '@stock-bot/utils'; + +// Calculate compound annual growth rate +const returns = [0.05, 0.03, -0.01, 0.04, 0.02]; +const cagr = calculateCAGR(startValue, endValue, years); + +// Calculate Sharpe ratio +const sharpeRatio = calculateSharpeRatio(returns, 0.02); +``` diff --git a/libs/utils/package.json b/libs/utils/package.json index 2cf648b..67d8e56 100644 --- a/libs/utils/package.json +++ b/libs/utils/package.json @@ -1,33 +1,33 @@ -{ - "name": "@stock-bot/utils", - "version": "1.0.0", - "description": "Common utility functions for stock-bot services", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "clean": "rimraf dist", - "test": "bun test" - }, - "dependencies": { - "@stock-bot/types": "*", - "date-fns": "^2.30.0" - }, - "devDependencies": { - "@types/node": "^20.11.0", - "typescript": "^5.3.0", - "bun-types": "^1.2.15" - }, - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ] -} +{ + "name": "@stock-bot/utils", + "version": "1.0.0", + "description": "Common utility functions for stock-bot services", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsc", + "clean": "rimraf dist", + "test": "bun test" + }, + "dependencies": { + "@stock-bot/types": "*", + "date-fns": "^2.30.0" + }, + "devDependencies": { + "@types/node": "^20.11.0", 
+ "typescript": "^5.3.0", + "bun-types": "^1.2.15" + }, + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist", + "README.md" + ] +} diff --git a/libs/utils/src/calculations/basic-calculations.ts b/libs/utils/src/calculations/basic-calculations.ts index b1e88e2..f0dbcb0 100644 --- a/libs/utils/src/calculations/basic-calculations.ts +++ b/libs/utils/src/calculations/basic-calculations.ts @@ -1,391 +1,391 @@ -/** - * Basic Financial Calculations - * Core mathematical functions for financial analysis - */ - -/** - * Calculate percentage change between two values - */ -export function percentageChange(oldValue: number, newValue: number): number { - if (oldValue === 0) return 0; - return ((newValue - oldValue) / oldValue) * 100; -} - -/** - * Calculate simple return - */ -export function simpleReturn(initialPrice: number, finalPrice: number): number { - if (initialPrice === 0) return 0; - return (finalPrice - initialPrice) / initialPrice; -} - -/** - * Calculate logarithmic return - */ -export function logReturn(initialPrice: number, finalPrice: number): number { - if (initialPrice <= 0 || finalPrice <= 0) return 0; - return Math.log(finalPrice / initialPrice); -} - -/** - * Calculate compound annual growth rate (CAGR) - */ -export function cagr(startValue: number, endValue: number, years: number): number { - if (years <= 0 || startValue <= 0 || endValue <= 0) return 0; - return Math.pow(endValue / startValue, 1 / years) - 1; -} - -/** - * Calculate annualized return from periodic returns - */ -export function annualizeReturn(periodicReturn: number, periodsPerYear: number): number { - return Math.pow(1 + periodicReturn, periodsPerYear) - 1; -} - -/** - * Calculate annualized volatility from periodic returns - */ -export function annualizeVolatility(periodicVolatility: number, periodsPerYear: number): number { - return periodicVolatility * Math.sqrt(periodsPerYear); -} - -/** - 
* Calculate present value - */ -export function presentValue(futureValue: number, rate: number, periods: number): number { - return futureValue / Math.pow(1 + rate, periods); -} - -/** - * Calculate future value - */ -export function futureValue(presentValue: number, rate: number, periods: number): number { - return presentValue * Math.pow(1 + rate, periods); -} - -/** - * Calculate net present value of cash flows - */ -export function netPresentValue(cashFlows: number[], discountRate: number): number { - return cashFlows.reduce((npv, cashFlow, index) => { - return npv + cashFlow / Math.pow(1 + discountRate, index); - }, 0); -} - -/** - * Calculate internal rate of return (IRR) using Newton-Raphson method - */ -export function internalRateOfReturn(cashFlows: number[], guess: number = 0.1, maxIterations: number = 100): number { - let rate = guess; - - for (let i = 0; i < maxIterations; i++) { - let npv = 0; - let dnpv = 0; - - for (let j = 0; j < cashFlows.length; j++) { - npv += cashFlows[j] / Math.pow(1 + rate, j); - dnpv += -j * cashFlows[j] / Math.pow(1 + rate, j + 1); - } - - if (Math.abs(npv) < 1e-10) break; - if (Math.abs(dnpv) < 1e-10) break; - - rate = rate - npv / dnpv; - } - - return rate; -} - -/** - * Calculate payback period - */ -export function paybackPeriod(initialInvestment: number, cashFlows: number[]): number { - let cumulativeCashFlow = 0; - - for (let i = 0; i < cashFlows.length; i++) { - cumulativeCashFlow += cashFlows[i]; - if (cumulativeCashFlow >= initialInvestment) { - return i + 1 - (cumulativeCashFlow - initialInvestment) / cashFlows[i]; - } - } - - return -1; // Never pays back -} - -/** - * Calculate compound interest - */ -export function compoundInterest( - principal: number, - rate: number, - periods: number, - compoundingFrequency: number = 1 -): number { - return principal * Math.pow(1 + rate / compoundingFrequency, compoundingFrequency * periods); -} - -/** - * Calculate effective annual rate - */ -export function 
effectiveAnnualRate(nominalRate: number, compoundingFrequency: number): number { - return Math.pow(1 + nominalRate / compoundingFrequency, compoundingFrequency) - 1; -} - -/** - * Calculate bond price given yield - */ -export function bondPrice( - faceValue: number, - couponRate: number, - yieldToMaturity: number, - periodsToMaturity: number, - paymentsPerYear: number = 2 -): number { - const couponPayment = (faceValue * couponRate) / paymentsPerYear; - const discountRate = yieldToMaturity / paymentsPerYear; - - let price = 0; - - // Present value of coupon payments - for (let i = 1; i <= periodsToMaturity; i++) { - price += couponPayment / Math.pow(1 + discountRate, i); - } - - // Present value of face value - price += faceValue / Math.pow(1 + discountRate, periodsToMaturity); - - return price; -} - -/** - * Calculate bond yield given price (Newton-Raphson approximation) - */ -export function bondYield( - price: number, - faceValue: number, - couponRate: number, - periodsToMaturity: number, - paymentsPerYear: number = 2, - guess: number = 0.05 -): number { - let yield_ = guess; - const maxIterations = 100; - const tolerance = 1e-8; - - for (let i = 0; i < maxIterations; i++) { - const calculatedPrice = bondPrice(faceValue, couponRate, yield_, periodsToMaturity, paymentsPerYear); - const diff = calculatedPrice - price; - - if (Math.abs(diff) < tolerance) break; - - // Numerical derivative - const delta = 0.0001; - const priceUp = bondPrice(faceValue, couponRate, yield_ + delta, periodsToMaturity, paymentsPerYear); - const derivative = (priceUp - calculatedPrice) / delta; - - if (Math.abs(derivative) < tolerance) break; - - yield_ = yield_ - diff / derivative; - } - - return yield_; -} - -/** - * Calculate duration (Macaulay duration) - */ -export function macaulayDuration( - faceValue: number, - couponRate: number, - yieldToMaturity: number, - periodsToMaturity: number, - paymentsPerYear: number = 2 -): number { - const couponPayment = (faceValue * couponRate) / 
paymentsPerYear; - const discountRate = yieldToMaturity / paymentsPerYear; - const bondPriceValue = bondPrice(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear); - - let weightedTime = 0; - - // Weighted time of coupon payments - for (let i = 1; i <= periodsToMaturity; i++) { - const presentValue = couponPayment / Math.pow(1 + discountRate, i); - weightedTime += (i * presentValue) / bondPriceValue; - } - - // Weighted time of face value - const faceValuePV = faceValue / Math.pow(1 + discountRate, periodsToMaturity); - weightedTime += (periodsToMaturity * faceValuePV) / bondPriceValue; - - return weightedTime / paymentsPerYear; // Convert to years -} - -/** - * Calculate modified duration - */ -export function modifiedDuration( - faceValue: number, - couponRate: number, - yieldToMaturity: number, - periodsToMaturity: number, - paymentsPerYear: number = 2 -): number { - const macDuration = macaulayDuration(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear); - return macDuration / (1 + yieldToMaturity / paymentsPerYear); -} - -/** - * Calculate bond convexity - */ -export function bondConvexity( - faceValue: number, - couponRate: number, - yieldToMaturity: number, - periodsToMaturity: number, - paymentsPerYear: number = 2 -): number { - const couponPayment = (faceValue * couponRate) / paymentsPerYear; - const discountRate = yieldToMaturity / paymentsPerYear; - - let convexity = 0; - const bondPriceValue = bondPrice(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear); - - for (let i = 1; i <= periodsToMaturity; i++) { - const presentValue = couponPayment / Math.pow(1 + discountRate, i); - convexity += (i * (i + 1) * presentValue) / Math.pow(1 + discountRate, 2); - } - - const faceValuePV = faceValue / Math.pow(1 + discountRate, periodsToMaturity); - convexity += (periodsToMaturity * (periodsToMaturity + 1) * faceValuePV) / Math.pow(1 + discountRate, 2); - - return convexity / (bondPriceValue * 
paymentsPerYear * paymentsPerYear); -} - -/** - * Calculate dollar duration - */ -export function dollarDuration( - faceValue: number, - couponRate: number, - yieldToMaturity: number, - periodsToMaturity: number, - paymentsPerYear: number = 2, - basisPointChange: number = 0.01 // 1 basis point = 0.01% -): number { - const modifiedDur = modifiedDuration(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear); - const bondPriceValue = bondPrice(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear); - return modifiedDur * bondPriceValue * basisPointChange; -} - -/** - * Calculate accrued interest - */ -export function accruedInterest( - faceValue: number, - couponRate: number, - daysSinceLastCoupon: number, - daysInCouponPeriod: number -): number { - return (faceValue * couponRate) * (daysSinceLastCoupon / daysInCouponPeriod); -} - -/** - * Calculate clean price - */ -export function cleanPrice(dirtyPrice: number, accruedInterestValue: number): number { - return dirtyPrice - accruedInterestValue; -} - -/** - * Calculate dirty price - */ -export function dirtyPrice(cleanPriceValue: number, accruedInterestValue: number): number { - return cleanPriceValue + accruedInterestValue; -} - -/** - * Calculate dividend discount model (DDM) - */ -export function dividendDiscountModel( - currentDividend: number, - growthRate: number, - discountRate: number -): number { - if (discountRate <= growthRate) return NaN; // Indeterminate - return currentDividend * (1 + growthRate) / (discountRate - growthRate); -} - -/** - * Calculate weighted average cost of capital (WACC) - */ -export function weightedAverageCostOfCapital( - costOfEquity: number, - costOfDebt: number, - equityWeight: number, - debtWeight: number, - taxRate: number -): number { - return (equityWeight * costOfEquity) + (debtWeight * costOfDebt * (1 - taxRate)); -} - -/** - * Calculate capital asset pricing model (CAPM) - */ -export function capitalAssetPricingModel( - 
riskFreeRate: number, - beta: number, - marketRiskPremium: number -): number { - return riskFreeRate + beta * marketRiskPremium; -} - -/** - * Calculate hurdle rate - */ -export function hurdleRate( - costOfCapital: number, - riskPremium: number -): number { - return costOfCapital + riskPremium; -} - -/** - * Calculate degree of operating leverage (DOL) - */ -export function degreeOfOperatingLeverage( - contributionMargin: number, - operatingIncome: number -): number { - return contributionMargin / operatingIncome; -} - -/** - * Calculate degree of financial leverage (DFL) - */ -export function degreeOfFinancialLeverage( - ebit: number, - earningsBeforeTax: number -): number { - return ebit / earningsBeforeTax; -} - -/** - * Calculate degree of total leverage (DTL) - */ -export function degreeOfTotalLeverage( - dol: number, - dfl: number -): number { - return dol * dfl; -} - -/** - * Calculate economic value added (EVA) - */ -export function economicValueAdded( - netOperatingProfitAfterTax: number, - capitalInvested: number, - wacc: number -): number { - return netOperatingProfitAfterTax - (capitalInvested * wacc); -} +/** + * Basic Financial Calculations + * Core mathematical functions for financial analysis + */ + +/** + * Calculate percentage change between two values + */ +export function percentageChange(oldValue: number, newValue: number): number { + if (oldValue === 0) return 0; + return ((newValue - oldValue) / oldValue) * 100; +} + +/** + * Calculate simple return + */ +export function simpleReturn(initialPrice: number, finalPrice: number): number { + if (initialPrice === 0) return 0; + return (finalPrice - initialPrice) / initialPrice; +} + +/** + * Calculate logarithmic return + */ +export function logReturn(initialPrice: number, finalPrice: number): number { + if (initialPrice <= 0 || finalPrice <= 0) return 0; + return Math.log(finalPrice / initialPrice); +} + +/** + * Calculate compound annual growth rate (CAGR) + */ +export function cagr(startValue: 
number, endValue: number, years: number): number { + if (years <= 0 || startValue <= 0 || endValue <= 0) return 0; + return Math.pow(endValue / startValue, 1 / years) - 1; +} + +/** + * Calculate annualized return from periodic returns + */ +export function annualizeReturn(periodicReturn: number, periodsPerYear: number): number { + return Math.pow(1 + periodicReturn, periodsPerYear) - 1; +} + +/** + * Calculate annualized volatility from periodic returns + */ +export function annualizeVolatility(periodicVolatility: number, periodsPerYear: number): number { + return periodicVolatility * Math.sqrt(periodsPerYear); +} + +/** + * Calculate present value + */ +export function presentValue(futureValue: number, rate: number, periods: number): number { + return futureValue / Math.pow(1 + rate, periods); +} + +/** + * Calculate future value + */ +export function futureValue(presentValue: number, rate: number, periods: number): number { + return presentValue * Math.pow(1 + rate, periods); +} + +/** + * Calculate net present value of cash flows + */ +export function netPresentValue(cashFlows: number[], discountRate: number): number { + return cashFlows.reduce((npv, cashFlow, index) => { + return npv + cashFlow / Math.pow(1 + discountRate, index); + }, 0); +} + +/** + * Calculate internal rate of return (IRR) using Newton-Raphson method + */ +export function internalRateOfReturn(cashFlows: number[], guess: number = 0.1, maxIterations: number = 100): number { + let rate = guess; + + for (let i = 0; i < maxIterations; i++) { + let npv = 0; + let dnpv = 0; + + for (let j = 0; j < cashFlows.length; j++) { + npv += cashFlows[j] / Math.pow(1 + rate, j); + dnpv += -j * cashFlows[j] / Math.pow(1 + rate, j + 1); + } + + if (Math.abs(npv) < 1e-10) break; + if (Math.abs(dnpv) < 1e-10) break; + + rate = rate - npv / dnpv; + } + + return rate; +} + +/** + * Calculate payback period + */ +export function paybackPeriod(initialInvestment: number, cashFlows: number[]): number { + let 
cumulativeCashFlow = 0; + + for (let i = 0; i < cashFlows.length; i++) { + cumulativeCashFlow += cashFlows[i]; + if (cumulativeCashFlow >= initialInvestment) { + return i + 1 - (cumulativeCashFlow - initialInvestment) / cashFlows[i]; + } + } + + return -1; // Never pays back +} + +/** + * Calculate compound interest + */ +export function compoundInterest( + principal: number, + rate: number, + periods: number, + compoundingFrequency: number = 1 +): number { + return principal * Math.pow(1 + rate / compoundingFrequency, compoundingFrequency * periods); +} + +/** + * Calculate effective annual rate + */ +export function effectiveAnnualRate(nominalRate: number, compoundingFrequency: number): number { + return Math.pow(1 + nominalRate / compoundingFrequency, compoundingFrequency) - 1; +} + +/** + * Calculate bond price given yield + */ +export function bondPrice( + faceValue: number, + couponRate: number, + yieldToMaturity: number, + periodsToMaturity: number, + paymentsPerYear: number = 2 +): number { + const couponPayment = (faceValue * couponRate) / paymentsPerYear; + const discountRate = yieldToMaturity / paymentsPerYear; + + let price = 0; + + // Present value of coupon payments + for (let i = 1; i <= periodsToMaturity; i++) { + price += couponPayment / Math.pow(1 + discountRate, i); + } + + // Present value of face value + price += faceValue / Math.pow(1 + discountRate, periodsToMaturity); + + return price; +} + +/** + * Calculate bond yield given price (Newton-Raphson approximation) + */ +export function bondYield( + price: number, + faceValue: number, + couponRate: number, + periodsToMaturity: number, + paymentsPerYear: number = 2, + guess: number = 0.05 +): number { + let yield_ = guess; + const maxIterations = 100; + const tolerance = 1e-8; + + for (let i = 0; i < maxIterations; i++) { + const calculatedPrice = bondPrice(faceValue, couponRate, yield_, periodsToMaturity, paymentsPerYear); + const diff = calculatedPrice - price; + + if (Math.abs(diff) < 
tolerance) break; + + // Numerical derivative + const delta = 0.0001; + const priceUp = bondPrice(faceValue, couponRate, yield_ + delta, periodsToMaturity, paymentsPerYear); + const derivative = (priceUp - calculatedPrice) / delta; + + if (Math.abs(derivative) < tolerance) break; + + yield_ = yield_ - diff / derivative; + } + + return yield_; +} + +/** + * Calculate duration (Macaulay duration) + */ +export function macaulayDuration( + faceValue: number, + couponRate: number, + yieldToMaturity: number, + periodsToMaturity: number, + paymentsPerYear: number = 2 +): number { + const couponPayment = (faceValue * couponRate) / paymentsPerYear; + const discountRate = yieldToMaturity / paymentsPerYear; + const bondPriceValue = bondPrice(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear); + + let weightedTime = 0; + + // Weighted time of coupon payments + for (let i = 1; i <= periodsToMaturity; i++) { + const presentValue = couponPayment / Math.pow(1 + discountRate, i); + weightedTime += (i * presentValue) / bondPriceValue; + } + + // Weighted time of face value + const faceValuePV = faceValue / Math.pow(1 + discountRate, periodsToMaturity); + weightedTime += (periodsToMaturity * faceValuePV) / bondPriceValue; + + return weightedTime / paymentsPerYear; // Convert to years +} + +/** + * Calculate modified duration + */ +export function modifiedDuration( + faceValue: number, + couponRate: number, + yieldToMaturity: number, + periodsToMaturity: number, + paymentsPerYear: number = 2 +): number { + const macDuration = macaulayDuration(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear); + return macDuration / (1 + yieldToMaturity / paymentsPerYear); +} + +/** + * Calculate bond convexity + */ +export function bondConvexity( + faceValue: number, + couponRate: number, + yieldToMaturity: number, + periodsToMaturity: number, + paymentsPerYear: number = 2 +): number { + const couponPayment = (faceValue * couponRate) / 
paymentsPerYear; + const discountRate = yieldToMaturity / paymentsPerYear; + + let convexity = 0; + const bondPriceValue = bondPrice(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear); + + for (let i = 1; i <= periodsToMaturity; i++) { + const presentValue = couponPayment / Math.pow(1 + discountRate, i); + convexity += (i * (i + 1) * presentValue) / Math.pow(1 + discountRate, 2); + } + + const faceValuePV = faceValue / Math.pow(1 + discountRate, periodsToMaturity); + convexity += (periodsToMaturity * (periodsToMaturity + 1) * faceValuePV) / Math.pow(1 + discountRate, 2); + + return convexity / (bondPriceValue * paymentsPerYear * paymentsPerYear); +} + +/** + * Calculate dollar duration + */ +export function dollarDuration( + faceValue: number, + couponRate: number, + yieldToMaturity: number, + periodsToMaturity: number, + paymentsPerYear: number = 2, + basisPointChange: number = 0.01 // 1 basis point = 0.01% +): number { + const modifiedDur = modifiedDuration(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear); + const bondPriceValue = bondPrice(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear); + return modifiedDur * bondPriceValue * basisPointChange; +} + +/** + * Calculate accrued interest + */ +export function accruedInterest( + faceValue: number, + couponRate: number, + daysSinceLastCoupon: number, + daysInCouponPeriod: number +): number { + return (faceValue * couponRate) * (daysSinceLastCoupon / daysInCouponPeriod); +} + +/** + * Calculate clean price + */ +export function cleanPrice(dirtyPrice: number, accruedInterestValue: number): number { + return dirtyPrice - accruedInterestValue; +} + +/** + * Calculate dirty price + */ +export function dirtyPrice(cleanPriceValue: number, accruedInterestValue: number): number { + return cleanPriceValue + accruedInterestValue; +} + +/** + * Calculate dividend discount model (DDM) + */ +export function dividendDiscountModel( + currentDividend: 
number, + growthRate: number, + discountRate: number +): number { + if (discountRate <= growthRate) return NaN; // Indeterminate + return currentDividend * (1 + growthRate) / (discountRate - growthRate); +} + +/** + * Calculate weighted average cost of capital (WACC) + */ +export function weightedAverageCostOfCapital( + costOfEquity: number, + costOfDebt: number, + equityWeight: number, + debtWeight: number, + taxRate: number +): number { + return (equityWeight * costOfEquity) + (debtWeight * costOfDebt * (1 - taxRate)); +} + +/** + * Calculate capital asset pricing model (CAPM) + */ +export function capitalAssetPricingModel( + riskFreeRate: number, + beta: number, + marketRiskPremium: number +): number { + return riskFreeRate + beta * marketRiskPremium; +} + +/** + * Calculate hurdle rate + */ +export function hurdleRate( + costOfCapital: number, + riskPremium: number +): number { + return costOfCapital + riskPremium; +} + +/** + * Calculate degree of operating leverage (DOL) + */ +export function degreeOfOperatingLeverage( + contributionMargin: number, + operatingIncome: number +): number { + return contributionMargin / operatingIncome; +} + +/** + * Calculate degree of financial leverage (DFL) + */ +export function degreeOfFinancialLeverage( + ebit: number, + earningsBeforeTax: number +): number { + return ebit / earningsBeforeTax; +} + +/** + * Calculate degree of total leverage (DTL) + */ +export function degreeOfTotalLeverage( + dol: number, + dfl: number +): number { + return dol * dfl; +} + +/** + * Calculate economic value added (EVA) + */ +export function economicValueAdded( + netOperatingProfitAfterTax: number, + capitalInvested: number, + wacc: number +): number { + return netOperatingProfitAfterTax - (capitalInvested * wacc); +} diff --git a/libs/utils/src/calculations/correlation-analysis.ts b/libs/utils/src/calculations/correlation-analysis.ts index 77d9d98..50c3c87 100644 --- a/libs/utils/src/calculations/correlation-analysis.ts +++ 
b/libs/utils/src/calculations/correlation-analysis.ts @@ -1,1167 +1,1167 @@ -/** - * Correlation Analysis Module - * - * Provides comprehensive correlation and covariance analysis tools for financial time series. - * Includes correlation matrices, rolling correlations, regime-dependent correlations, - * and advanced correlation modeling techniques. - */ - -export interface CorrelationResult { - correlation: number; - pValue: number; - significance: boolean; - confidenceInterval?: [number, number]; -} - -export interface CorrelationMatrix { - matrix: number[][]; - labels: string[]; - eigenvalues: number[]; - eigenvectors: number[][]; - conditionNumber: number; -} - -export interface RollingCorrelationResult { - correlations: number[]; - timestamps: Date[]; - average: number; - volatility: number; - min: number; - max: number; -} - -export interface CovarianceMatrix { - matrix: number[][]; - labels: string[]; - volatilities: number[]; - correlations: number[][]; - eigenvalues: number[]; - determinant: number; -} - -export interface CorrelationBreakdown { - linear: number; - nonlinear: number; - tail: number; - rank: number; -} - -export interface DynamicCorrelationModel { - parameters: number[]; - correlations: number[]; - logLikelihood: number; - aic: number; - bic: number; -} - -/** - * Calculate Pearson correlation coefficient between two time series - */ -export function pearsonCorrelation( - x: number[], - y: number[] -): CorrelationResult { - if (x.length !== y.length || x.length < 2) { - throw new Error('Arrays must have same length and at least 2 observations'); - } - - const n = x.length; - const sumX = x.reduce((a, b) => a + b, 0); - const sumY = y.reduce((a, b) => a + b, 0); - const sumXY = x.reduce((sum, xi, i) => sum + xi * y[i], 0); - const sumX2 = x.reduce((sum, xi) => sum + xi * xi, 0); - const sumY2 = y.reduce((sum, yi) => sum + yi * yi, 0); - - const numerator = n * sumXY - sumX * sumY; - const denominator = Math.sqrt((n * sumX2 - sumX * sumX) * (n 
* sumY2 - sumY * sumY)); - - const correlation = denominator === 0 ? 0 : numerator / denominator; - - // Calculate statistical significance (t-test) - const df = n - 2; - const tStat = correlation * Math.sqrt(df / (1 - correlation * correlation)); - const pValue = 2 * (1 - studentTCDF(Math.abs(tStat), df)); - const significance = pValue < 0.05; - - // Calculate confidence interval (Fisher transformation) - const z = 0.5 * Math.log((1 + correlation) / (1 - correlation)); - const seZ = 1 / Math.sqrt(n - 3); - const zLower = z - 1.96 * seZ; - const zUpper = z + 1.96 * seZ; - const confidenceInterval: [number, number] = [ - (Math.exp(2 * zLower) - 1) / (Math.exp(2 * zLower) + 1), - (Math.exp(2 * zUpper) - 1) / (Math.exp(2 * zUpper) + 1) - ]; - - return { - correlation, - pValue, - significance, - confidenceInterval - }; -} - - - -/** - * Calculate Spearman rank correlation coefficient - */ -export function spearmanCorrelation(x: number[], y: number[]): CorrelationResult { - if (x.length !== y.length || x.length < 2) { - throw new Error('Arrays must have same length and at least 2 observations'); - } - - // Convert to ranks - const xRanks = getRanks(x); - const yRanks = getRanks(y); - - return pearsonCorrelation(xRanks, yRanks); -} - -/** - * Calculate Kendall's tau correlation coefficient - */ -export function kendallTau(x: number[], y: number[]): CorrelationResult { - if (x.length !== y.length || x.length < 2) { - throw new Error('Arrays must have same length and at least 2 observations'); - } - - const n = x.length; - let concordant = 0; - let discordant = 0; - - for (let i = 0; i < n - 1; i++) { - for (let j = i + 1; j < n; j++) { - const xDiff = x[i] - x[j]; - const yDiff = y[i] - y[j]; - - if (xDiff * yDiff > 0) { - concordant++; - } else if (xDiff * yDiff < 0) { - discordant++; - } - } - } - - const correlation = (concordant - discordant) / (n * (n - 1) / 2); - - // Approximate p-value for large samples - const variance = (2 * (2 * n + 5)) / (9 * n * (n - 1)); - 
const z = correlation / Math.sqrt(variance); - const pValue = 2 * (1 - normalCDF(Math.abs(z))); - const significance = pValue < 0.05; - - return { - correlation, - pValue, - significance - }; -} - -/** - * Calculate correlation matrix for multiple time series - */ -export function correlationMatrix( - data: number[][], - labels: string[] = [], - method: 'pearson' | 'spearman' | 'kendall' = 'pearson' -): CorrelationMatrix { - const n = data.length; - - if (labels.length === 0) { - labels = Array.from({ length: n }, (_, i) => `Series${i + 1}`); - } - - const matrix: number[][] = Array(n).fill(null).map(() => Array(n).fill(0)); - - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - if (i === j) { - matrix[i][j] = 1; - } else { - let corrResult: CorrelationResult; - switch (method) { - case 'spearman': - corrResult = spearmanCorrelation(data[i], data[j]); - break; - case 'kendall': - corrResult = kendallTau(data[i], data[j]); - break; - default: - corrResult = pearsonCorrelation(data[i], data[j]); - } - matrix[i][j] = corrResult.correlation; - } - } - } - - // Calculate eigenvalues and eigenvectors - const { eigenvalues, eigenvectors } = eigenDecomposition(matrix); - - // Calculate condition number - const conditionNumber = Math.max(...eigenvalues) / Math.min(...eigenvalues.filter(x => x > 1e-10)); - - return { - matrix, - labels, - eigenvalues, - eigenvectors, - conditionNumber - }; -} - -/** - * Calculate rolling correlation between two time series - */ -export function rollingCorrelation( - x: number[], - y: number[], - window: number, - timestamps?: Date[] -): RollingCorrelationResult { - if (x.length !== y.length || window > x.length) { - throw new Error('Invalid input parameters'); - } - - const correlations: number[] = []; - const resultTimestamps: Date[] = []; - - for (let i = window - 1; i < x.length; i++) { - const xWindow = x.slice(i - window + 1, i + 1); - const yWindow = y.slice(i - window + 1, i + 1); - - const corr = 
pearsonCorrelation(xWindow, yWindow).correlation; - correlations.push(corr); - - if (timestamps) { - resultTimestamps.push(timestamps[i]); - } else { - resultTimestamps.push(new Date(i)); - } - } - - const average = correlations.reduce((a, b) => a + b, 0) / correlations.length; - const variance = correlations.reduce((sum, corr) => sum + Math.pow(corr - average, 2), 0) / correlations.length; - const volatility = Math.sqrt(variance); - const min = Math.min(...correlations); - const max = Math.max(...correlations); - - return { - correlations, - timestamps: resultTimestamps, - average, - volatility, - min, - max - }; -} - -/** - * Calculate covariance matrix - */ -export function covarianceMatrix(data: number[][], labels: string[] = []): CovarianceMatrix { - const n = data.length; - - if (labels.length === 0) { - labels = Array.from({ length: n }, (_, i) => `Series${i + 1}`); - } - - // Calculate means - const means = data.map(series => series.reduce((a, b) => a + b, 0) / series.length); - - // Calculate covariance matrix - const matrix: number[][] = Array(n).fill(null).map(() => Array(n).fill(0)); - const m = data[0].length; // Number of observations - - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - let covariance = 0; - for (let k = 0; k < m; k++) { - covariance += (data[i][k] - means[i]) * (data[j][k] - means[j]); - } - matrix[i][j] = covariance / (m - 1); - } - } - - // Calculate volatilities (standard deviations) - const volatilities = data.map((series, i) => Math.sqrt(matrix[i][i])); - - // Calculate correlation matrix from covariance matrix - const correlations: number[][] = Array(n).fill(null).map(() => Array(n).fill(0)); - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - correlations[i][j] = matrix[i][j] / (volatilities[i] * volatilities[j]); - } - } - - // Calculate eigenvalues - const { eigenvalues } = eigenDecomposition(matrix); - - // Calculate determinant - const determinant = eigenvalues.reduce((prod, val) => prod * val, 
1); - - return { - matrix, - labels, - volatilities, - correlations, - eigenvalues, - determinant - }; -} - -/** - * Calculate partial correlation controlling for other variables - */ -export function partialCorrelation( - x: number[], - y: number[], - controls: number[][] -): CorrelationResult { - // Use matrix operations to calculate partial correlation - const n = x.length; - const k = controls.length; - - // Build design matrix - const X = Array(n).fill(null).map(() => Array(k + 1).fill(1)); - for (let i = 0; i < n; i++) { - for (let j = 0; j < k; j++) { - X[i][j + 1] = controls[j][i]; - } - } - - // Calculate residuals for x and y after regressing on controls - const xResiduals = residuals(x, X); - const yResiduals = residuals(y, X); - - return pearsonCorrelation(xResiduals, yResiduals); -} - -/** - * Test for correlation regime changes - */ -export function correlationRegimeAnalysis( - x: number[], - y: number[], - window: number = 60 -): { - regimes: { start: number; end: number; correlation: number }[]; - breakpoints: number[]; - stability: number; -} { - const rollingCorr = rollingCorrelation(x, y, window); - const correlations = rollingCorr.correlations; - - // Detect regime changes using CUSUM test - const breakpoints: number[] = []; - const threshold = 2.0; // CUSUM threshold - - let cusum = 0; - const mean = correlations.reduce((a, b) => a + b, 0) / correlations.length; - - for (let i = 1; i < correlations.length; i++) { - cusum += correlations[i] - mean; - if (Math.abs(cusum) > threshold) { - breakpoints.push(i); - cusum = 0; - } - } - - // Build regimes - const regimes: { start: number; end: number; correlation: number }[] = []; - let start = 0; - - for (const breakpoint of breakpoints) { - const regimeCorr = correlations.slice(start, breakpoint); - const avgCorr = regimeCorr.reduce((a, b) => a + b, 0) / regimeCorr.length; - - regimes.push({ - start, - end: breakpoint, - correlation: avgCorr - }); - start = breakpoint; - } - - // Add final regime - 
if (start < correlations.length) { - const regimeCorr = correlations.slice(start); - const avgCorr = regimeCorr.reduce((a, b) => a + b, 0) / regimeCorr.length; - - regimes.push({ - start, - end: correlations.length, - correlation: avgCorr - }); - } - - // Calculate stability measure - const regimeVariances = regimes.map(regime => { - const regimeCorr = correlations.slice(regime.start, regime.end); - const mean = regime.correlation; - return regimeCorr.reduce((sum, corr) => sum + Math.pow(corr - mean, 2), 0) / regimeCorr.length; - }); - - const stability = 1 / (1 + regimeVariances.reduce((a, b) => a + b, 0) / regimeVariances.length); - - return { - regimes, - breakpoints, - stability - }; -} - -/** - * Calculate tail correlation using copula methods - */ -export function tailCorrelation( - x: number[], - y: number[], - threshold: number = 0.05 -): { - upperTail: number; - lowerTail: number; - symmetric: boolean; -} { - const n = x.length; - const upperThreshold = 1 - threshold; - const lowerThreshold = threshold; - - // Convert to uniform marginals - const xRanks = getRanks(x).map(rank => rank / n); - const yRanks = getRanks(y).map(rank => rank / n); - - // Upper tail correlation - let upperCount = 0; - let upperTotal = 0; - - for (let i = 0; i < n; i++) { - if (xRanks[i] > upperThreshold) { - upperTotal++; - if (yRanks[i] > upperThreshold) { - upperCount++; - } - } - } - - const upperTail = upperTotal > 0 ? upperCount / upperTotal : 0; - - // Lower tail correlation - let lowerCount = 0; - let lowerTotal = 0; - - for (let i = 0; i < n; i++) { - if (xRanks[i] < lowerThreshold) { - lowerTotal++; - if (yRanks[i] < lowerThreshold) { - lowerCount++; - } - } - } - - const lowerTail = lowerTotal > 0 ? 
lowerCount / lowerTotal : 0; - - // Test for symmetry - const symmetric = Math.abs(upperTail - lowerTail) < 0.1; - - return { - upperTail, - lowerTail, - symmetric - }; -} - -/** - * Dynamic Conditional Correlation (DCC) model estimation - */ -export function dccModel( - data: number[][], - maxIter: number = 100, - tolerance: number = 1e-6 -): DynamicCorrelationModel { - const n = data.length; - const T = data[0].length; - - // Initialize parameters [alpha, beta] - let params = [0.01, 0.95]; - - // Standardize data (assume unit variance for simplicity) - const standardizedData = data.map(series => { - const mean = series.reduce((a, b) => a + b, 0) / series.length; - const variance = series.reduce((sum, x) => sum + Math.pow(x - mean, 2), 0) / (series.length - 1); - const std = Math.sqrt(variance); - return series.map(x => (x - mean) / std); - }); - - let correlations: number[] = []; - let logLikelihood = -Infinity; - - for (let iter = 0; iter < maxIter; iter++) { - const [alpha, beta] = params; - - // Calculate dynamic correlations - correlations = []; - - // Initialize with unconditional correlation - const unconditionalCorr = pearsonCorrelation(standardizedData[0], standardizedData[1]).correlation; - let Qt = unconditionalCorr; - - let newLogLikelihood = 0; - - for (let t = 1; t < T; t++) { - // Update correlation - const prevShock = standardizedData[0][t-1] * standardizedData[1][t-1]; - Qt = (1 - alpha - beta) * unconditionalCorr + alpha * prevShock + beta * Qt; - - correlations.push(Qt); - - // Add to log-likelihood - const det = 1 - Qt * Qt; - if (det > 0) { - newLogLikelihood -= 0.5 * Math.log(det); - newLogLikelihood -= 0.5 * ( - Math.pow(standardizedData[0][t], 2) + - Math.pow(standardizedData[1][t], 2) - - 2 * Qt * standardizedData[0][t] * standardizedData[1][t] - ) / det; - } - } - - // Check convergence - if (Math.abs(newLogLikelihood - logLikelihood) < tolerance) { - break; - } - - logLikelihood = newLogLikelihood; - - // Simple gradient update (in 
practice, use more sophisticated optimization) - params[0] = Math.max(0.001, Math.min(0.999, params[0] + 0.001)); - params[1] = Math.max(0.001, Math.min(0.999 - params[0], params[1] + 0.001)); - } - - // Calculate information criteria - const k = 2; // Number of parameters - const aic = -2 * logLikelihood + 2 * k; - const bic = -2 * logLikelihood + k * Math.log(T); - - return { - parameters: params, - correlations, - logLikelihood, - aic, - bic - }; -} - -/** - * Test for Granger causality in correlations - */ -export function grangerCausalityTest( - x: number[], - y: number[], - maxLag: number = 5 -): { - xCausesY: { fStatistic: number; pValue: number; significant: boolean }; - yCausesX: { fStatistic: number; pValue: number; significant: boolean }; - optimalLag: number; -} { - let bestLag = 1; - let minAIC = Infinity; - - // Find optimal lag - for (let lag = 1; lag <= maxLag; lag++) { - const aic = varModel(x, y, lag).aic; - if (aic < minAIC) { - minAIC = aic; - bestLag = lag; - } - } - - // Test x -> y causality - const fullModel = varModel(x, y, bestLag); - const restrictedModelY = arModel(y, bestLag); - - const fStatX = ((restrictedModelY.rss - fullModel.rssY) / bestLag) / (fullModel.rssY / (x.length - 2 * bestLag - 1)); - const pValueX = 1 - fCDF(fStatX, bestLag, x.length - 2 * bestLag - 1); - - // Test y -> x causality - const restrictedModelX = arModel(x, bestLag); - - const fStatY = ((restrictedModelX.rss - fullModel.rssX) / bestLag) / (fullModel.rssX / (x.length - 2 * bestLag - 1)); - const pValueY = 1 - fCDF(fStatY, bestLag, x.length - 2 * bestLag - 1); - - return { - xCausesY: { - fStatistic: fStatX, - pValue: pValueX, - significant: pValueX < 0.05 - }, - yCausesX: { - fStatistic: fStatY, - pValue: pValueY, - significant: pValueY < 0.05 - }, - optimalLag: bestLag - }; -} -/** - * Calculate Distance Correlation - */ -export function distanceCorrelation(x: number[], y: number[]): CorrelationResult { - if (x.length !== y.length || x.length < 2) { - throw 
new Error('Arrays must have same length and at least 2 observations'); - } - - const n = x.length; - - // Calculate distance matrices - const a = Array(n).fill(null).map(() => Array(n).fill(0)); - const b = Array(n).fill(null).map(() => Array(n).fill(0)); - - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - a[i][j] = Math.abs(x[i] - x[j]); - b[i][j] = Math.abs(y[i] - y[j]); - } - } - - // Calculate double centered distance matrices - const aMeanRow = a.map(row => row.reduce((sum, val) => sum + val, 0) / n); - const bMeanRow = b.map(row => row.reduce((sum, val) => sum + val, 0) / n); - const aMeanTotal = aMeanRow.reduce((sum, val) => sum + val, 0) / n; - const bMeanTotal = bMeanRow.reduce((sum, val) => sum + val, 0) / n; - - const A = Array(n).fill(null).map(() => Array(n).fill(0)); - const B = Array(n).fill(null).map(() => Array(n).fill(0)); - - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - A[i][j] = a[i][j] - aMeanRow[i] - aMeanRow[j] + aMeanTotal; - B[i][j] = b[i][j] - bMeanRow[i] - bMeanRow[j] + bMeanTotal; - } - } - - // Calculate distance covariance and variances - let dcov = 0; - let dvarX = 0; - let dvarY = 0; - - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - dcov += A[i][j] * B[i][j]; - dvarX += A[i][j] * A[i][j]; - dvarY += B[i][j] * B[i][j]; - } - } - - dcov = Math.sqrt(dcov / (n * n)); - dvarX = Math.sqrt(dvarX / (n * n)); - dvarY = Math.sqrt(dvarY / (n * n)); - - const correlation = dvarX * dvarY === 0 ? 
0 : dcov / Math.sqrt(dvarX * dvarY); - - // Approximate p-value (permutation test) - let pValue = 1; - const numPermutations = 100; - - for (let p = 0; p < numPermutations; p++) { - const yPermuted = shuffleArray([...y]); - const bPermuted = Array(n).fill(null).map(() => Array(n).fill(0)); - - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - bPermuted[i][j] = Math.abs(yPermuted[i] - yPermuted[j]); - } - } - - const bMeanRowPermuted = bPermuted.map(row => row.reduce((sum, val) => sum + val, 0) / n); - const bMeanTotalPermuted = bMeanRowPermuted.reduce((sum, val) => sum + val, 0) / n; - - const BPermuted = Array(n).fill(null).map(() => Array(n).fill(0)); - - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - BPermuted[i][j] = bPermuted[i][j] - bMeanRowPermuted[i] - bMeanRowPermuted[j] + bMeanTotalPermuted; - } - } - - let dcovPermuted = 0; - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - dcovPermuted += A[i][j] * BPermuted[i][j]; - } - } - dcovPermuted = Math.sqrt(dcovPermuted / (n * n)); - - if (dcovPermuted >= dcov) { - pValue++; - } - } - - pValue /= (numPermutations + 1); - const significance = pValue < 0.05; - - return { - correlation, - pValue, - significance - }; -} - -/** - * Calculate Mutual Information - */ -export function mutualInformation(x: number[], y: number[], numBins: number = 10): CorrelationResult { - if (x.length !== y.length || x.length < 2) { - throw new Error('Arrays must have same length and at least 2 observations'); - } - - const n = x.length; - - // Calculate histograms - const xMin = Math.min(...x); - const xMax = Math.max(...x); - const yMin = Math.min(...y); - const yMax = Math.max(...y); - - const xBinWidth = (xMax - xMin) / numBins; - const yBinWidth = (yMax - yMin) / numBins; - - const jointHistogram = Array(numBins).fill(null).map(() => Array(numBins).fill(0)); - const xHistogram = Array(numBins).fill(0); - const yHistogram = Array(numBins).fill(0); - - for (let i = 0; i < n; i++) { - 
const xBin = Math.floor((x[i] - xMin) / xBinWidth); - const yBin = Math.floor((y[i] - yMin) / yBinWidth); - - if (xBin >= 0 && xBin < numBins && yBin >= 0 && yBin < numBins) { - jointHistogram[xBin][yBin]++; - xHistogram[xBin]++; - yHistogram[yBin]++; - } - } - - // Calculate probabilities - const jointProbabilities = jointHistogram.map(row => row.map(count => count / n)); - const xProbabilities = xHistogram.map(count => count / n); - const yProbabilities = yHistogram.map(count => count / n); - - // Calculate mutual information - let mi = 0; - for (let i = 0; i < numBins; i++) { - for (let j = 0; j < numBins; j++) { - if (jointProbabilities[i][j] > 0 && xProbabilities[i] > 0 && yProbabilities[j] > 0) { - mi += jointProbabilities[i][j] * Math.log(jointProbabilities[i][j] / (xProbabilities[i] * yProbabilities[j])); - } - } - } - - const correlation = mi; // Use MI as correlation measure - - // Approximate p-value (permutation test) - let pValue = 1; - const numPermutations = 100; - - for (let p = 0; p < numPermutations; p++) { - const yPermuted = shuffleArray([...y]); - let miPermuted = 0; - - const jointHistogramPermuted = Array(numBins).fill(null).map(() => Array(numBins).fill(0)); - - for (let i = 0; i < n; i++) { - const xBin = Math.floor((x[i] - xMin) / xBinWidth); - const yBin = Math.floor((yPermuted[i] - yMin) / yBinWidth); - - if (xBin >= 0 && xBin < numBins && yBin >= 0 && yBin < numBins) { - jointHistogramPermuted[xBin][yBin]++; - } - } - - const jointProbabilitiesPermuted = jointHistogramPermuted.map(row => row.map(count => count / n)); - - for (let i = 0; i < numBins; i++) { - for (let j = 0; j < numBins; j++) { - if (jointProbabilitiesPermuted[i][j] > 0 && xProbabilities[i] > 0 && yProbabilities[j] > 0) { - miPermuted += jointProbabilitiesPermuted[i][j] * Math.log(jointProbabilitiesPermuted[i][j] / (xProbabilities[i] * yProbabilities[j])); - } - } - } - - if (miPermuted >= mi) { - pValue++; - } - } - - pValue /= (numPermutations + 1); - const 
significance = pValue < 0.05; - - return { - correlation, - pValue, - significance - }; -} - -/** - * Calculate Cross-Correlation - */ -export function crossCorrelation(x: number[], y: number[], maxLag: number): number[] { - const n = x.length; - if (n !== y.length) { - throw new Error('Arrays must have the same length'); - } - - const correlations: number[] = []; - - for (let lag = -maxLag; lag <= maxLag; lag++) { - let sum = 0; - let count = 0; - - for (let i = 0; i < n; i++) { - const yIndex = i + lag; - - if (yIndex >= 0 && yIndex < n) { - sum += (x[i] - average(x)) * (y[yIndex] - average(y)); - count++; - } - } - - const stdX = Math.sqrt(x.reduce((sum, xi) => sum + Math.pow(xi - average(x), 2), 0) / (n - 1)); - const stdY = Math.sqrt(y.reduce((sum, yi) => sum + Math.pow(yi - average(y), 2), 0) / (n - 1)); - - const correlation = count > 0 ? sum / ((count - 1) * stdX * stdY) : 0; - correlations.push(correlation); - } - - return correlations; -} - -/** - * Calculate Autocorrelation - */ -export function autocorrelation(x: number[], lag: number): number { - const n = x.length; - if (lag >= n) { - throw new Error('Lag must be less than the length of the array'); - } - - let sum = 0; - for (let i = lag; i < n; i++) { - sum += (x[i] - average(x)) * (x[i - lag] - average(x)); - } - - const std = Math.sqrt(x.reduce((sum, xi) => sum + Math.pow(xi - average(x), 2), 0) / (n - 1)); - return sum / ((n - lag - 1) * std * std); -} - -/** - * Helper function to shuffle an array (Fisher-Yates shuffle) - */ -function shuffleArray(array: T[]): T[] { - const newArray = [...array]; - for (let i = newArray.length - 1; i > 0; i--) { - const j = Math.floor(Math.random() * (i + 1)); - [newArray[i], newArray[j]] = [newArray[j], newArray[i]]; - } - return newArray; -} - -/** - * Helper function to calculate the average of an array of numbers - */ -function average(arr: number[]): number { - if (arr.length === 0) return 0; - return arr.reduce((a, b) => a + b, 0) / arr.length; -} - 
-function getRanks(arr: number[]): number[] { - const sorted = arr.map((val, idx) => ({ val, idx })).sort((a, b) => a.val - b.val); - const ranks = new Array(arr.length); - - for (let i = 0; i < sorted.length; i++) { - ranks[sorted[i].idx] = i + 1; - } - - return ranks; -} - -function studentTCDF(t: number, df: number): number { - // Approximation for Student's t CDF - const x = df / (t * t + df); - return 1 - 0.5 * betaIncomplete(df / 2, 0.5, x); -} - -function normalCDF(z: number): number { - return 0.5 * (1 + erf(z / Math.sqrt(2))); -} - -function erf(x: number): number { - // Approximation of error function - const a1 = 0.254829592; - const a2 = -0.284496736; - const a3 = 1.421413741; - const a4 = -1.453152027; - const a5 = 1.061405429; - const p = 0.3275911; - - const sign = x >= 0 ? 1 : -1; - x = Math.abs(x); - - const t = 1.0 / (1.0 + p * x); - const y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.exp(-x * x); - - return sign * y; -} - -function betaIncomplete(a: number, b: number, x: number): number { - // Better approximation of incomplete beta function - if (x === 0) return 0; - if (x === 1) return 1; - - // Use continued fraction approximation (Lentz's algorithm) - const fpmin = 1e-30; - const maxIter = 200; - const eps = 3e-7; - - const bt = Math.exp( - gammaLn(a + b) - gammaLn(a) - gammaLn(b) + - a * Math.log(x) + b * Math.log(1 - x) - ); - - if (x < (a + 1) / (a + b + 2)) { - return bt * betaContinuedFraction(a, b, x) / a; - } else { - return 1 - bt * betaContinuedFraction(b, a, 1 - x) / b; - } - - function betaContinuedFraction(a: number, b: number, x: number): number { - let c = 1; - let d = 1 - (a + b) * x / (a + 1); - if (Math.abs(d) < fpmin) d = fpmin; - d = 1 / d; - let h = d; - - for (let m = 1; m <= maxIter; m++) { - const m2 = 2 * m; - const aa = m * (b - m) * x / ((a + m2 - 1) * (a + m2)); - d = 1 + aa * d; - if (Math.abs(d) < fpmin) d = fpmin; - c = 1 + aa / c; - if (Math.abs(c) < fpmin) c = fpmin; - d = 1 / d; - h *= 
d * c; - - const bb = -(a + m) * (a + b + m) * x / ((a + m2) * (a + m2 + 1)); - d = 1 + bb * d; - if (Math.abs(d) < fpmin) d = fpmin; - c = 1 + bb / c; - if (Math.abs(c) < fpmin) c = fpmin; - d = 1 / d; - const del = d * c; - h *= del; - - if (Math.abs(del - 1) < eps) break; - } - - return h; - } - - function gammaLn(xx: number): number { - const stp = 2.50662827465; - const coeffs = [ - 76.18009172947146, - -86.50532032941677, - 24.01409824083091, - -1.231739572450155, - 0.1208650973866179e-2, - -0.5395239384953e-5 - ]; - - let x = xx - 1; - let tmp = x + 5.5; - tmp -= (x + 0.5) * Math.log(tmp); - let ser = 1.000000000190015; - - for (let j = 0; j < 6; j++) { - x += 1; - ser += coeffs[j] / x; - } - - return -tmp + Math.log(stp * ser); - } -} - -function eigenDecomposition(matrix: number[][]): { eigenvalues: number[]; eigenvectors: number[][] } { - // Simplified eigenvalue decomposition (for symmetric matrices) - const n = matrix.length; - - // Power iteration for largest eigenvalue - const eigenvalues: number[] = []; - const eigenvectors: number[][] = []; - - for (let k = 0; k < Math.min(n, 3); k++) { // Calculate first 3 eigenvalues - let v = Array(n).fill(1 / Math.sqrt(n)); - let lambda = 0; - - for (let iter = 0; iter < 100; iter++) { - const Av = matrix.map(row => row.reduce((sum, val, i) => sum + val * v[i], 0)); - const newLambda = Av.reduce((sum, val, i) => sum + val * v[i], 0); - const norm = Math.sqrt(Av.reduce((sum, val) => sum + val * val, 0)); - - if (norm === 0) break; - - v = Av.map(val => val / norm); - - if (Math.abs(newLambda - lambda) < 1e-10) break; - lambda = newLambda; - } - - eigenvalues.push(lambda); - eigenvectors.push([...v]); - - // Deflate matrix for next eigenvalue - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - matrix[i][j] -= lambda * v[i] * v[j]; - } - } - } - - return { eigenvalues, eigenvectors }; -} - -function residuals(y: number[], X: number[][]): number[] { - // Simple linear regression to calculate residuals 
- const n = y.length; - const k = X[0].length; - - // Calculate (X'X)^-1 X' y - const XtX = Array(k).fill(null).map(() => Array(k).fill(0)); - const Xty = Array(k).fill(0); - - // X'X - for (let i = 0; i < k; i++) { - for (let j = 0; j < k; j++) { - for (let t = 0; t < n; t++) { - XtX[i][j] += X[t][i] * X[t][j]; - } - } - } - - // X'y - for (let i = 0; i < k; i++) { - for (let t = 0; t < n; t++) { - Xty[i] += X[t][i] * y[t]; - } - } - - // Solve for beta (simplified - assumes invertible) - const beta = solveLinearSystem(XtX, Xty); - - // Calculate residuals - const residuals: number[] = []; - for (let t = 0; t < n; t++) { - let fitted = 0; - for (let i = 0; i < k; i++) { - fitted += X[t][i] * beta[i]; - } - residuals.push(y[t] - fitted); - } - - return residuals; -} - -function solveLinearSystem(A: number[][], b: number[]): number[] { - // Gaussian elimination (simplified) - const n = A.length; - const augmented = A.map((row, i) => [...row, b[i]]); - - // Forward elimination - for (let i = 0; i < n; i++) { - for (let j = i + 1; j < n; j++) { - const factor = augmented[j][i] / augmented[i][i]; - for (let k = i; k <= n; k++) { - augmented[j][k] -= factor * augmented[i][k]; - } - } - } - - // Back substitution - const x = Array(n).fill(0); - for (let i = n - 1; i >= 0; i--) { - x[i] = augmented[i][n]; - for (let j = i + 1; j < n; j++) { - x[i] -= augmented[i][j] * x[j]; - } - x[i] /= augmented[i][i]; - } - - return x; -} - -function varModel(x: number[], y: number[], lag: number): { - rssX: number; - rssY: number; - aic: number; -} { - // Simplified VAR model calculation - const n = x.length - lag; - - // Build design matrix - const X = Array(n).fill(null).map(() => Array(2 * lag + 1).fill(1)); - const yX = Array(n).fill(0); - const yY = Array(n).fill(0); - - for (let t = 0; t < n; t++) { - yX[t] = x[t + lag]; - yY[t] = y[t + lag]; - - for (let l = 0; l < lag; l++) { - X[t][1 + l] = x[t + lag - 1 - l]; - X[t][1 + lag + l] = y[t + lag - 1 - l]; - } - } - - // Calculate 
residuals for both equations - const residualsX = residuals(yX, X); - const residualsY = residuals(yY, X); - - const rssX = residualsX.reduce((sum, r) => sum + r * r, 0); - const rssY = residualsY.reduce((sum, r) => sum + r * r, 0); - - const k = 2 * lag + 1; - const aic = n * Math.log(rssX + rssY) + 2 * k; - - return { rssX, rssY, aic }; -} - -function arModel(y: number[], lag: number): { rss: number } { - const n = y.length - lag; - - // Build design matrix - const X = Array(n).fill(null).map(() => Array(lag + 1).fill(1)); - const yVec = Array(n).fill(0); - - for (let t = 0; t < n; t++) { - yVec[t] = y[t + lag]; - - for (let l = 0; l < lag; l++) { - X[t][1 + l] = y[t + lag - 1 - l]; - } - } - - const res = residuals(yVec, X); - const rss = res.reduce((sum, r) => sum + r * r, 0); - - return { rss }; -} - -function fCDF(f: number, df1: number, df2: number): number { - // Approximation for F distribution CDF - if (f <= 0) return 0; - if (f === Infinity) return 1; - - const x = df2 / (df2 + df1 * f); - return 1 - betaIncomplete(df2 / 2, df1 / 2, x); -} +/** + * Correlation Analysis Module + * + * Provides comprehensive correlation and covariance analysis tools for financial time series. + * Includes correlation matrices, rolling correlations, regime-dependent correlations, + * and advanced correlation modeling techniques. 
+ */ + +export interface CorrelationResult { + correlation: number; + pValue: number; + significance: boolean; + confidenceInterval?: [number, number]; +} + +export interface CorrelationMatrix { + matrix: number[][]; + labels: string[]; + eigenvalues: number[]; + eigenvectors: number[][]; + conditionNumber: number; +} + +export interface RollingCorrelationResult { + correlations: number[]; + timestamps: Date[]; + average: number; + volatility: number; + min: number; + max: number; +} + +export interface CovarianceMatrix { + matrix: number[][]; + labels: string[]; + volatilities: number[]; + correlations: number[][]; + eigenvalues: number[]; + determinant: number; +} + +export interface CorrelationBreakdown { + linear: number; + nonlinear: number; + tail: number; + rank: number; +} + +export interface DynamicCorrelationModel { + parameters: number[]; + correlations: number[]; + logLikelihood: number; + aic: number; + bic: number; +} + +/** + * Calculate Pearson correlation coefficient between two time series + */ +export function pearsonCorrelation( + x: number[], + y: number[] +): CorrelationResult { + if (x.length !== y.length || x.length < 2) { + throw new Error('Arrays must have same length and at least 2 observations'); + } + + const n = x.length; + const sumX = x.reduce((a, b) => a + b, 0); + const sumY = y.reduce((a, b) => a + b, 0); + const sumXY = x.reduce((sum, xi, i) => sum + xi * y[i], 0); + const sumX2 = x.reduce((sum, xi) => sum + xi * xi, 0); + const sumY2 = y.reduce((sum, yi) => sum + yi * yi, 0); + + const numerator = n * sumXY - sumX * sumY; + const denominator = Math.sqrt((n * sumX2 - sumX * sumX) * (n * sumY2 - sumY * sumY)); + + const correlation = denominator === 0 ? 
0 : numerator / denominator; + + // Calculate statistical significance (t-test) + const df = n - 2; + const tStat = correlation * Math.sqrt(df / (1 - correlation * correlation)); + const pValue = 2 * (1 - studentTCDF(Math.abs(tStat), df)); + const significance = pValue < 0.05; + + // Calculate confidence interval (Fisher transformation) + const z = 0.5 * Math.log((1 + correlation) / (1 - correlation)); + const seZ = 1 / Math.sqrt(n - 3); + const zLower = z - 1.96 * seZ; + const zUpper = z + 1.96 * seZ; + const confidenceInterval: [number, number] = [ + (Math.exp(2 * zLower) - 1) / (Math.exp(2 * zLower) + 1), + (Math.exp(2 * zUpper) - 1) / (Math.exp(2 * zUpper) + 1) + ]; + + return { + correlation, + pValue, + significance, + confidenceInterval + }; +} + + + +/** + * Calculate Spearman rank correlation coefficient + */ +export function spearmanCorrelation(x: number[], y: number[]): CorrelationResult { + if (x.length !== y.length || x.length < 2) { + throw new Error('Arrays must have same length and at least 2 observations'); + } + + // Convert to ranks + const xRanks = getRanks(x); + const yRanks = getRanks(y); + + return pearsonCorrelation(xRanks, yRanks); +} + +/** + * Calculate Kendall's tau correlation coefficient + */ +export function kendallTau(x: number[], y: number[]): CorrelationResult { + if (x.length !== y.length || x.length < 2) { + throw new Error('Arrays must have same length and at least 2 observations'); + } + + const n = x.length; + let concordant = 0; + let discordant = 0; + + for (let i = 0; i < n - 1; i++) { + for (let j = i + 1; j < n; j++) { + const xDiff = x[i] - x[j]; + const yDiff = y[i] - y[j]; + + if (xDiff * yDiff > 0) { + concordant++; + } else if (xDiff * yDiff < 0) { + discordant++; + } + } + } + + const correlation = (concordant - discordant) / (n * (n - 1) / 2); + + // Approximate p-value for large samples + const variance = (2 * (2 * n + 5)) / (9 * n * (n - 1)); + const z = correlation / Math.sqrt(variance); + const pValue = 2 * (1 
- normalCDF(Math.abs(z))); + const significance = pValue < 0.05; + + return { + correlation, + pValue, + significance + }; +} + +/** + * Calculate correlation matrix for multiple time series + */ +export function correlationMatrix( + data: number[][], + labels: string[] = [], + method: 'pearson' | 'spearman' | 'kendall' = 'pearson' +): CorrelationMatrix { + const n = data.length; + + if (labels.length === 0) { + labels = Array.from({ length: n }, (_, i) => `Series${i + 1}`); + } + + const matrix: number[][] = Array(n).fill(null).map(() => Array(n).fill(0)); + + for (let i = 0; i < n; i++) { + for (let j = 0; j < n; j++) { + if (i === j) { + matrix[i][j] = 1; + } else { + let corrResult: CorrelationResult; + switch (method) { + case 'spearman': + corrResult = spearmanCorrelation(data[i], data[j]); + break; + case 'kendall': + corrResult = kendallTau(data[i], data[j]); + break; + default: + corrResult = pearsonCorrelation(data[i], data[j]); + } + matrix[i][j] = corrResult.correlation; + } + } + } + + // Calculate eigenvalues and eigenvectors + const { eigenvalues, eigenvectors } = eigenDecomposition(matrix); + + // Calculate condition number + const conditionNumber = Math.max(...eigenvalues) / Math.min(...eigenvalues.filter(x => x > 1e-10)); + + return { + matrix, + labels, + eigenvalues, + eigenvectors, + conditionNumber + }; +} + +/** + * Calculate rolling correlation between two time series + */ +export function rollingCorrelation( + x: number[], + y: number[], + window: number, + timestamps?: Date[] +): RollingCorrelationResult { + if (x.length !== y.length || window > x.length) { + throw new Error('Invalid input parameters'); + } + + const correlations: number[] = []; + const resultTimestamps: Date[] = []; + + for (let i = window - 1; i < x.length; i++) { + const xWindow = x.slice(i - window + 1, i + 1); + const yWindow = y.slice(i - window + 1, i + 1); + + const corr = pearsonCorrelation(xWindow, yWindow).correlation; + correlations.push(corr); + + if 
(timestamps) {
      resultTimestamps.push(timestamps[i]);
    } else {
      resultTimestamps.push(new Date(i));
    }
  }

  const average = correlations.reduce((a, b) => a + b, 0) / correlations.length;
  const variance = correlations.reduce((sum, corr) => sum + Math.pow(corr - average, 2), 0) / correlations.length;
  const volatility = Math.sqrt(variance);
  const min = Math.min(...correlations);
  const max = Math.max(...correlations);

  return {
    correlations,
    timestamps: resultTimestamps,
    average,
    volatility,
    min,
    max
  };
}

/**
 * Calculate the sample covariance matrix and derived statistics.
 *
 * BUG FIX: `eigenDecomposition` deflates the matrix it is handed in place,
 * so the covariance matrix in the result used to come back corrupted; it now
 * receives a deep copy.
 *
 * NOTE(review): `eigenDecomposition` returns at most the 3 leading
 * eigenvalues, so `determinant` (their product) is only exact for n <= 3
 * series — confirm whether callers rely on it for larger matrices.
 *
 * @param data   One array of observations per series (equal lengths assumed).
 * @param labels Optional series names; defaults to Series1..SeriesN.
 */
export function covarianceMatrix(data: number[][], labels: string[] = []): CovarianceMatrix {
  const n = data.length;

  if (labels.length === 0) {
    labels = Array.from({ length: n }, (_, i) => `Series${i + 1}`);
  }

  // Per-series means
  const means = data.map(series => series.reduce((a, b) => a + b, 0) / series.length);

  // Unbiased sample covariance (divides by m - 1)
  const matrix: number[][] = Array(n).fill(null).map(() => Array(n).fill(0));
  const m = data[0].length; // Number of observations

  for (let i = 0; i < n; i++) {
    for (let j = 0; j < n; j++) {
      let covariance = 0;
      for (let k = 0; k < m; k++) {
        covariance += (data[i][k] - means[i]) * (data[j][k] - means[j]);
      }
      matrix[i][j] = covariance / (m - 1);
    }
  }

  // Volatilities (standard deviations) taken from the diagonal
  const volatilities = data.map((series, i) => Math.sqrt(matrix[i][i]));

  // Correlation matrix implied by the covariance matrix
  const correlations: number[][] = Array(n).fill(null).map(() => Array(n).fill(0));
  for (let i = 0; i < n; i++) {
    for (let j = 0; j < n; j++) {
      correlations[i][j] = matrix[i][j] / (volatilities[i] * volatilities[j]);
    }
  }

  // Decompose a deep copy: eigenDecomposition deflates its argument in place.
  const { eigenvalues } = eigenDecomposition(matrix.map(row => [...row]));

  // Determinant approximated as the product of the returned eigenvalues.
  const determinant = eigenvalues.reduce((prod, val) => prod * val, 1);

  return {
    matrix,
    labels,
    volatilities,
    correlations,
    eigenvalues,
    determinant
  };
}

/**
 * Calculate partial correlation of x and y, controlling for other variables.
 *
 * Regresses x and y on the controls (plus an intercept) and correlates the
 * two residual series. NOTE(review): the p-value comes straight from
 * pearsonCorrelation and does not subtract the k control degrees of
 * freedom — confirm if exact inference is required.
 */
export function partialCorrelation(
  x: number[],
  y: number[],
  controls: number[][]
): CorrelationResult {
  // Use matrix operations to calculate partial correlation
  const n = x.length;
  const k = controls.length;

  // Design matrix: intercept column followed by one column per control.
  const X = Array(n).fill(null).map(() => Array(k + 1).fill(1));
  for (let i = 0; i < n; i++) {
    for (let j = 0; j < k; j++) {
      X[i][j + 1] = controls[j][i];
    }
  }

  // Residuals of x and y after removing the controls' linear effect.
  const xResiduals = residuals(x, X);
  const yResiduals = residuals(y, X);

  return pearsonCorrelation(xResiduals, yResiduals);
}

/**
 * Test for correlation regime changes
 */
export function correlationRegimeAnalysis(
  x: number[],
  y: number[],
  window: number = 60
): {
  regimes: { start: number; end: number; correlation: number }[];
  breakpoints: number[];
  stability: number;
} {
  const rollingCorr = rollingCorrelation(x, y, window);
  const correlations = rollingCorr.correlations;

  // Detect regime changes using CUSUM test
  const breakpoints: number[] = [];
  const threshold = 2.0; // CUSUM threshold

  let cusum = 0;
  const mean = correlations.reduce((a, b) => a + b, 0) / correlations.length;

  for (let i = 1; i < correlations.length; i++) {
    cusum += correlations[i] - mean;
    if (Math.abs(cusum) > threshold) {
      breakpoints.push(i);
      cusum = 0;
    }
  }

  // Build regimes
  const regimes: { start: number; end: number; correlation: number }[] = [];
  let start = 0;

  for (const breakpoint of breakpoints) {
    const regimeCorr = correlations.slice(start, breakpoint);
    const avgCorr = regimeCorr.reduce((a, b) => a + b, 0) / regimeCorr.length;

    regimes.push({
      start,
      end: breakpoint,
      correlation: avgCorr
    });
    start = breakpoint;
  }

  // Add final regime
  if (start < correlations.length) {
    const regimeCorr = correlations.slice(start);
const avgCorr = regimeCorr.reduce((a, b) => a + b, 0) / regimeCorr.length;

    regimes.push({
      start,
      end: correlations.length,
      correlation: avgCorr
    });
  }

  // Calculate stability measure: mean within-regime variance of the rolling
  // correlation, mapped into (0, 1] via 1 / (1 + meanVariance).
  const regimeVariances = regimes.map(regime => {
    const regimeCorr = correlations.slice(regime.start, regime.end);
    const mean = regime.correlation;
    return regimeCorr.reduce((sum, corr) => sum + Math.pow(corr - mean, 2), 0) / regimeCorr.length;
  });

  const stability = 1 / (1 + regimeVariances.reduce((a, b) => a + b, 0) / regimeVariances.length);

  return {
    regimes,
    breakpoints,
    stability
  };
}

/**
 * Calculate tail correlation using copula methods.
 *
 * Converts both series to uniform marginals via their ranks and measures the
 * empirical conditional exceedance probability in each tail:
 * P(Y in tail | X in tail), where `threshold` is the tail mass (default 5%).
 * NOTE(review): no parametric copula is fitted — these are raw empirical
 * tail-dependence estimates.
 */
export function tailCorrelation(
  x: number[],
  y: number[],
  threshold: number = 0.05
): {
  upperTail: number;
  lowerTail: number;
  symmetric: boolean;
} {
  const n = x.length;
  const upperThreshold = 1 - threshold;
  const lowerThreshold = threshold;

  // Convert to uniform marginals (1-based ranks scaled into (0, 1])
  const xRanks = getRanks(x).map(rank => rank / n);
  const yRanks = getRanks(y).map(rank => rank / n);

  // Upper tail correlation: of the points where X is in the upper tail,
  // the fraction where Y is too.
  let upperCount = 0;
  let upperTotal = 0;

  for (let i = 0; i < n; i++) {
    if (xRanks[i] > upperThreshold) {
      upperTotal++;
      if (yRanks[i] > upperThreshold) {
        upperCount++;
      }
    }
  }

  // 0 when no observation falls in the tail (avoids 0/0).
  const upperTail = upperTotal > 0 ? upperCount / upperTotal : 0;

  // Lower tail correlation (mirror image of the upper-tail computation)
  let lowerCount = 0;
  let lowerTotal = 0;

  for (let i = 0; i < n; i++) {
    if (xRanks[i] < lowerThreshold) {
      lowerTotal++;
      if (yRanks[i] < lowerThreshold) {
        lowerCount++;
      }
    }
  }

  const lowerTail = lowerTotal > 0 ?
lowerCount / lowerTotal : 0;

  // Test for symmetry: tails within 0.1 of each other count as symmetric.
  const symmetric = Math.abs(upperTail - lowerTail) < 0.1;

  return {
    upperTail,
    lowerTail,
    symmetric
  };
}

/**
 * Dynamic Conditional Correlation (DCC) model estimation.
 *
 * Runs a bivariate DCC(1,1)-style recursion on the first two series of
 * `data`: Qt = (1 - a - b)*Qbar + a*(e1[t-1]*e2[t-1]) + b*Qt_prev.
 * NOTE(review): only data[0] and data[1] are used, and the parameter
 * "update" below is a fixed nudge, not a true gradient step.
 */
export function dccModel(
  data: number[][],
  maxIter: number = 100,
  tolerance: number = 1e-6
): DynamicCorrelationModel {
  const n = data.length;
  const T = data[0].length;

  // Initialize parameters [alpha, beta]
  let params = [0.01, 0.95];

  // Standardize data (assume unit variance for simplicity)
  const standardizedData = data.map(series => {
    const mean = series.reduce((a, b) => a + b, 0) / series.length;
    const variance = series.reduce((sum, x) => sum + Math.pow(x - mean, 2), 0) / (series.length - 1);
    const std = Math.sqrt(variance);
    return series.map(x => (x - mean) / std);
  });

  let correlations: number[] = [];
  let logLikelihood = -Infinity;

  for (let iter = 0; iter < maxIter; iter++) {
    const [alpha, beta] = params;

    // Calculate dynamic correlations
    correlations = [];

    // Initialize with unconditional correlation
    const unconditionalCorr = pearsonCorrelation(standardizedData[0], standardizedData[1]).correlation;
    let Qt = unconditionalCorr;

    let newLogLikelihood = 0;

    for (let t = 1; t < T; t++) {
      // Update correlation
      const prevShock = standardizedData[0][t-1] * standardizedData[1][t-1];
      Qt = (1 - alpha - beta) * unconditionalCorr + alpha * prevShock + beta * Qt;

      correlations.push(Qt);

      // Add to log-likelihood (bivariate Gaussian with correlation Qt;
      // skipped when |Qt| >= 1 since the density is then undefined)
      const det = 1 - Qt * Qt;
      if (det > 0) {
        newLogLikelihood -= 0.5 * Math.log(det);
        newLogLikelihood -= 0.5 * (
          Math.pow(standardizedData[0][t], 2) +
          Math.pow(standardizedData[1][t], 2) -
          2 * Qt * standardizedData[0][t] * standardizedData[1][t]
        ) / det;
      }
    }

    // Check convergence
    if (Math.abs(newLogLikelihood - logLikelihood) < tolerance) {
      break;
    }

    logLikelihood = newLogLikelihood;

    // Simple gradient update (in
practice, use more sophisticated optimization)
    params[0] = Math.max(0.001, Math.min(0.999, params[0] + 0.001));
    params[1] = Math.max(0.001, Math.min(0.999 - params[0], params[1] + 0.001));
  }

  // Calculate information criteria
  const k = 2; // Number of parameters
  const aic = -2 * logLikelihood + 2 * k;
  const bic = -2 * logLikelihood + k * Math.log(T);

  return {
    parameters: params,
    correlations,
    logLikelihood,
    aic,
    bic
  };
}

/**
 * Test for Granger causality in both directions.
 *
 * Selects the lag in 1..maxLag minimizing the VAR AIC, then F-tests the
 * restricted (AR) model against the unrestricted (VAR) model.
 *
 * BUG FIX: the residual degrees of freedom now account for the `bestLag`
 * observations consumed to build the lagged regressors. Each regression uses
 * T = x.length - bestLag observations and 2*bestLag + 1 parameters, so
 * df2 = x.length - 3*bestLag - 1 (previously x.length - 2*bestLag - 1).
 */
export function grangerCausalityTest(
  x: number[],
  y: number[],
  maxLag: number = 5
): {
  xCausesY: { fStatistic: number; pValue: number; significant: boolean };
  yCausesX: { fStatistic: number; pValue: number; significant: boolean };
  optimalLag: number;
} {
  let bestLag = 1;
  let minAIC = Infinity;

  // Find optimal lag by VAR AIC
  for (let lag = 1; lag <= maxLag; lag++) {
    const aic = varModel(x, y, lag).aic;
    if (aic < minAIC) {
      minAIC = aic;
      bestLag = lag;
    }
  }

  // Residual df of the unrestricted model:
  // (observations used) - (parameters) = (x.length - bestLag) - (2*bestLag + 1)
  const residualDf = x.length - 3 * bestLag - 1;

  // Test x -> y causality
  const fullModel = varModel(x, y, bestLag);
  const restrictedModelY = arModel(y, bestLag);

  const fStatX = ((restrictedModelY.rss - fullModel.rssY) / bestLag) / (fullModel.rssY / residualDf);
  const pValueX = 1 - fCDF(fStatX, bestLag, residualDf);

  // Test y -> x causality
  const restrictedModelX = arModel(x, bestLag);

  const fStatY = ((restrictedModelX.rss - fullModel.rssX) / bestLag) / (fullModel.rssX / residualDf);
  const pValueY = 1 - fCDF(fStatY, bestLag, residualDf);

  return {
    xCausesY: {
      fStatistic: fStatX,
      pValue: pValueX,
      significant: pValueX < 0.05
    },
    yCausesX: {
      fStatistic: fStatY,
      pValue: pValueY,
      significant: pValueY < 0.05
    },
    optimalLag: bestLag
  };
}
/**
 * Calculate Distance Correlation
 */
export function distanceCorrelation(x: number[], y: number[]): CorrelationResult {
  if (x.length !== y.length || x.length < 2) {
    throw
new Error('Arrays must have same length and at least 2 observations');
  }

  const n = x.length;

  // Calculate distance matrices
  const a = Array(n).fill(null).map(() => Array(n).fill(0));
  const b = Array(n).fill(null).map(() => Array(n).fill(0));

  for (let i = 0; i < n; i++) {
    for (let j = 0; j < n; j++) {
      a[i][j] = Math.abs(x[i] - x[j]);
      b[i][j] = Math.abs(y[i] - y[j]);
    }
  }

  // Calculate double centered distance matrices
  const aMeanRow = a.map(row => row.reduce((sum, val) => sum + val, 0) / n);
  const bMeanRow = b.map(row => row.reduce((sum, val) => sum + val, 0) / n);
  const aMeanTotal = aMeanRow.reduce((sum, val) => sum + val, 0) / n;
  const bMeanTotal = bMeanRow.reduce((sum, val) => sum + val, 0) / n;

  const A = Array(n).fill(null).map(() => Array(n).fill(0));
  const B = Array(n).fill(null).map(() => Array(n).fill(0));

  for (let i = 0; i < n; i++) {
    for (let j = 0; j < n; j++) {
      A[i][j] = a[i][j] - aMeanRow[i] - aMeanRow[j] + aMeanTotal;
      B[i][j] = b[i][j] - bMeanRow[i] - bMeanRow[j] + bMeanTotal;
    }
  }

  // Calculate distance covariance and variances
  let dcov = 0;
  let dvarX = 0;
  let dvarY = 0;

  for (let i = 0; i < n; i++) {
    for (let j = 0; j < n; j++) {
      dcov += A[i][j] * B[i][j];
      dvarX += A[i][j] * A[i][j];
      dvarY += B[i][j] * B[i][j];
    }
  }

  dcov = Math.sqrt(dcov / (n * n));
  dvarX = Math.sqrt(dvarX / (n * n));
  dvarY = Math.sqrt(dvarY / (n * n));

  const correlation = dvarX * dvarY === 0 ?
0 : dcov / Math.sqrt(dvarX * dvarY); + + // Approximate p-value (permutation test) + let pValue = 1; + const numPermutations = 100; + + for (let p = 0; p < numPermutations; p++) { + const yPermuted = shuffleArray([...y]); + const bPermuted = Array(n).fill(null).map(() => Array(n).fill(0)); + + for (let i = 0; i < n; i++) { + for (let j = 0; j < n; j++) { + bPermuted[i][j] = Math.abs(yPermuted[i] - yPermuted[j]); + } + } + + const bMeanRowPermuted = bPermuted.map(row => row.reduce((sum, val) => sum + val, 0) / n); + const bMeanTotalPermuted = bMeanRowPermuted.reduce((sum, val) => sum + val, 0) / n; + + const BPermuted = Array(n).fill(null).map(() => Array(n).fill(0)); + + for (let i = 0; i < n; i++) { + for (let j = 0; j < n; j++) { + BPermuted[i][j] = bPermuted[i][j] - bMeanRowPermuted[i] - bMeanRowPermuted[j] + bMeanTotalPermuted; + } + } + + let dcovPermuted = 0; + for (let i = 0; i < n; i++) { + for (let j = 0; j < n; j++) { + dcovPermuted += A[i][j] * BPermuted[i][j]; + } + } + dcovPermuted = Math.sqrt(dcovPermuted / (n * n)); + + if (dcovPermuted >= dcov) { + pValue++; + } + } + + pValue /= (numPermutations + 1); + const significance = pValue < 0.05; + + return { + correlation, + pValue, + significance + }; +} + +/** + * Calculate Mutual Information + */ +export function mutualInformation(x: number[], y: number[], numBins: number = 10): CorrelationResult { + if (x.length !== y.length || x.length < 2) { + throw new Error('Arrays must have same length and at least 2 observations'); + } + + const n = x.length; + + // Calculate histograms + const xMin = Math.min(...x); + const xMax = Math.max(...x); + const yMin = Math.min(...y); + const yMax = Math.max(...y); + + const xBinWidth = (xMax - xMin) / numBins; + const yBinWidth = (yMax - yMin) / numBins; + + const jointHistogram = Array(numBins).fill(null).map(() => Array(numBins).fill(0)); + const xHistogram = Array(numBins).fill(0); + const yHistogram = Array(numBins).fill(0); + + for (let i = 0; i < n; i++) { + 
const xBin = Math.floor((x[i] - xMin) / xBinWidth); + const yBin = Math.floor((y[i] - yMin) / yBinWidth); + + if (xBin >= 0 && xBin < numBins && yBin >= 0 && yBin < numBins) { + jointHistogram[xBin][yBin]++; + xHistogram[xBin]++; + yHistogram[yBin]++; + } + } + + // Calculate probabilities + const jointProbabilities = jointHistogram.map(row => row.map(count => count / n)); + const xProbabilities = xHistogram.map(count => count / n); + const yProbabilities = yHistogram.map(count => count / n); + + // Calculate mutual information + let mi = 0; + for (let i = 0; i < numBins; i++) { + for (let j = 0; j < numBins; j++) { + if (jointProbabilities[i][j] > 0 && xProbabilities[i] > 0 && yProbabilities[j] > 0) { + mi += jointProbabilities[i][j] * Math.log(jointProbabilities[i][j] / (xProbabilities[i] * yProbabilities[j])); + } + } + } + + const correlation = mi; // Use MI as correlation measure + + // Approximate p-value (permutation test) + let pValue = 1; + const numPermutations = 100; + + for (let p = 0; p < numPermutations; p++) { + const yPermuted = shuffleArray([...y]); + let miPermuted = 0; + + const jointHistogramPermuted = Array(numBins).fill(null).map(() => Array(numBins).fill(0)); + + for (let i = 0; i < n; i++) { + const xBin = Math.floor((x[i] - xMin) / xBinWidth); + const yBin = Math.floor((yPermuted[i] - yMin) / yBinWidth); + + if (xBin >= 0 && xBin < numBins && yBin >= 0 && yBin < numBins) { + jointHistogramPermuted[xBin][yBin]++; + } + } + + const jointProbabilitiesPermuted = jointHistogramPermuted.map(row => row.map(count => count / n)); + + for (let i = 0; i < numBins; i++) { + for (let j = 0; j < numBins; j++) { + if (jointProbabilitiesPermuted[i][j] > 0 && xProbabilities[i] > 0 && yProbabilities[j] > 0) { + miPermuted += jointProbabilitiesPermuted[i][j] * Math.log(jointProbabilitiesPermuted[i][j] / (xProbabilities[i] * yProbabilities[j])); + } + } + } + + if (miPermuted >= mi) { + pValue++; + } + } + + pValue /= (numPermutations + 1); + const 
significance = pValue < 0.05; + + return { + correlation, + pValue, + significance + }; +} + +/** + * Calculate Cross-Correlation + */ +export function crossCorrelation(x: number[], y: number[], maxLag: number): number[] { + const n = x.length; + if (n !== y.length) { + throw new Error('Arrays must have the same length'); + } + + const correlations: number[] = []; + + for (let lag = -maxLag; lag <= maxLag; lag++) { + let sum = 0; + let count = 0; + + for (let i = 0; i < n; i++) { + const yIndex = i + lag; + + if (yIndex >= 0 && yIndex < n) { + sum += (x[i] - average(x)) * (y[yIndex] - average(y)); + count++; + } + } + + const stdX = Math.sqrt(x.reduce((sum, xi) => sum + Math.pow(xi - average(x), 2), 0) / (n - 1)); + const stdY = Math.sqrt(y.reduce((sum, yi) => sum + Math.pow(yi - average(y), 2), 0) / (n - 1)); + + const correlation = count > 0 ? sum / ((count - 1) * stdX * stdY) : 0; + correlations.push(correlation); + } + + return correlations; +} + +/** + * Calculate Autocorrelation + */ +export function autocorrelation(x: number[], lag: number): number { + const n = x.length; + if (lag >= n) { + throw new Error('Lag must be less than the length of the array'); + } + + let sum = 0; + for (let i = lag; i < n; i++) { + sum += (x[i] - average(x)) * (x[i - lag] - average(x)); + } + + const std = Math.sqrt(x.reduce((sum, xi) => sum + Math.pow(xi - average(x), 2), 0) / (n - 1)); + return sum / ((n - lag - 1) * std * std); +} + +/** + * Helper function to shuffle an array (Fisher-Yates shuffle) + */ +function shuffleArray(array: T[]): T[] { + const newArray = [...array]; + for (let i = newArray.length - 1; i > 0; i--) { + const j = Math.floor(Math.random() * (i + 1)); + [newArray[i], newArray[j]] = [newArray[j], newArray[i]]; + } + return newArray; +} + +/** + * Helper function to calculate the average of an array of numbers + */ +function average(arr: number[]): number { + if (arr.length === 0) return 0; + return arr.reduce((a, b) => a + b, 0) / arr.length; +} + 
function getRanks(arr: number[]): number[] {
  // Ordinal 1-based ranks by ascending value. Ties receive distinct
  // consecutive ranks (not averaged).
  const sorted = arr.map((val, idx) => ({ val, idx })).sort((a, b) => a.val - b.val);
  const ranks = new Array(arr.length);

  for (let i = 0; i < sorted.length; i++) {
    ranks[sorted[i].idx] = i + 1;
  }

  return ranks;
}

function studentTCDF(t: number, df: number): number {
  // Approximation for Student's t CDF via the regularized incomplete beta
  // function. Depends only on t*t, so it is correct for t >= 0 only —
  // callers pass Math.abs(t).
  const x = df / (t * t + df);
  return 1 - 0.5 * betaIncomplete(df / 2, 0.5, x);
}

function normalCDF(z: number): number {
  // Standard normal CDF expressed through the error function.
  return 0.5 * (1 + erf(z / Math.sqrt(2)));
}

function erf(x: number): number {
  // Approximation of error function (Abramowitz & Stegun style rational
  // polynomial; odd symmetry handled via the sign factor).
  const a1 = 0.254829592;
  const a2 = -0.284496736;
  const a3 = 1.421413741;
  const a4 = -1.453152027;
  const a5 = 1.061405429;
  const p = 0.3275911;

  const sign = x >= 0 ? 1 : -1;
  x = Math.abs(x);

  const t = 1.0 / (1.0 + p * x);
  const y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.exp(-x * x);

  return sign * y;
}

function betaIncomplete(a: number, b: number, x: number): number {
  // Better approximation of incomplete beta function (regularized I_x(a, b)).
  if (x === 0) return 0;
  if (x === 1) return 1;

  // Use continued fraction approximation (Lentz's algorithm)
  const fpmin = 1e-30;
  const maxIter = 200;
  const eps = 3e-7;

  // Prefactor: x^a (1-x)^b / B(a, b), computed in log space for stability.
  const bt = Math.exp(
    gammaLn(a + b) - gammaLn(a) - gammaLn(b) +
    a * Math.log(x) + b * Math.log(1 - x)
  );

  // Use the continued fraction directly where it converges quickly,
  // otherwise evaluate via the symmetric complement I_x(a,b) = 1 - I_{1-x}(b,a).
  if (x < (a + 1) / (a + b + 2)) {
    return bt * betaContinuedFraction(a, b, x) / a;
  } else {
    return 1 - bt * betaContinuedFraction(b, a, 1 - x) / b;
  }

  function betaContinuedFraction(a: number, b: number, x: number): number {
    // Modified Lentz evaluation of the incomplete beta continued fraction;
    // fpmin guards against division by (near-)zero partial denominators.
    let c = 1;
    let d = 1 - (a + b) * x / (a + 1);
    if (Math.abs(d) < fpmin) d = fpmin;
    d = 1 / d;
    let h = d;

    for (let m = 1; m <= maxIter; m++) {
      const m2 = 2 * m;
      // Even step of the continued fraction
      const aa = m * (b - m) * x / ((a + m2 - 1) * (a + m2));
      d = 1 + aa * d;
      if (Math.abs(d) < fpmin) d = fpmin;
      c = 1 + aa / c;
      if (Math.abs(c) < fpmin) c = fpmin;
      d = 1 / d;
      h *=
d * c; + + const bb = -(a + m) * (a + b + m) * x / ((a + m2) * (a + m2 + 1)); + d = 1 + bb * d; + if (Math.abs(d) < fpmin) d = fpmin; + c = 1 + bb / c; + if (Math.abs(c) < fpmin) c = fpmin; + d = 1 / d; + const del = d * c; + h *= del; + + if (Math.abs(del - 1) < eps) break; + } + + return h; + } + + function gammaLn(xx: number): number { + const stp = 2.50662827465; + const coeffs = [ + 76.18009172947146, + -86.50532032941677, + 24.01409824083091, + -1.231739572450155, + 0.1208650973866179e-2, + -0.5395239384953e-5 + ]; + + let x = xx - 1; + let tmp = x + 5.5; + tmp -= (x + 0.5) * Math.log(tmp); + let ser = 1.000000000190015; + + for (let j = 0; j < 6; j++) { + x += 1; + ser += coeffs[j] / x; + } + + return -tmp + Math.log(stp * ser); + } +} + +function eigenDecomposition(matrix: number[][]): { eigenvalues: number[]; eigenvectors: number[][] } { + // Simplified eigenvalue decomposition (for symmetric matrices) + const n = matrix.length; + + // Power iteration for largest eigenvalue + const eigenvalues: number[] = []; + const eigenvectors: number[][] = []; + + for (let k = 0; k < Math.min(n, 3); k++) { // Calculate first 3 eigenvalues + let v = Array(n).fill(1 / Math.sqrt(n)); + let lambda = 0; + + for (let iter = 0; iter < 100; iter++) { + const Av = matrix.map(row => row.reduce((sum, val, i) => sum + val * v[i], 0)); + const newLambda = Av.reduce((sum, val, i) => sum + val * v[i], 0); + const norm = Math.sqrt(Av.reduce((sum, val) => sum + val * val, 0)); + + if (norm === 0) break; + + v = Av.map(val => val / norm); + + if (Math.abs(newLambda - lambda) < 1e-10) break; + lambda = newLambda; + } + + eigenvalues.push(lambda); + eigenvectors.push([...v]); + + // Deflate matrix for next eigenvalue + for (let i = 0; i < n; i++) { + for (let j = 0; j < n; j++) { + matrix[i][j] -= lambda * v[i] * v[j]; + } + } + } + + return { eigenvalues, eigenvectors }; +} + +function residuals(y: number[], X: number[][]): number[] { + // Simple linear regression to calculate residuals 
+ const n = y.length; + const k = X[0].length; + + // Calculate (X'X)^-1 X' y + const XtX = Array(k).fill(null).map(() => Array(k).fill(0)); + const Xty = Array(k).fill(0); + + // X'X + for (let i = 0; i < k; i++) { + for (let j = 0; j < k; j++) { + for (let t = 0; t < n; t++) { + XtX[i][j] += X[t][i] * X[t][j]; + } + } + } + + // X'y + for (let i = 0; i < k; i++) { + for (let t = 0; t < n; t++) { + Xty[i] += X[t][i] * y[t]; + } + } + + // Solve for beta (simplified - assumes invertible) + const beta = solveLinearSystem(XtX, Xty); + + // Calculate residuals + const residuals: number[] = []; + for (let t = 0; t < n; t++) { + let fitted = 0; + for (let i = 0; i < k; i++) { + fitted += X[t][i] * beta[i]; + } + residuals.push(y[t] - fitted); + } + + return residuals; +} + +function solveLinearSystem(A: number[][], b: number[]): number[] { + // Gaussian elimination (simplified) + const n = A.length; + const augmented = A.map((row, i) => [...row, b[i]]); + + // Forward elimination + for (let i = 0; i < n; i++) { + for (let j = i + 1; j < n; j++) { + const factor = augmented[j][i] / augmented[i][i]; + for (let k = i; k <= n; k++) { + augmented[j][k] -= factor * augmented[i][k]; + } + } + } + + // Back substitution + const x = Array(n).fill(0); + for (let i = n - 1; i >= 0; i--) { + x[i] = augmented[i][n]; + for (let j = i + 1; j < n; j++) { + x[i] -= augmented[i][j] * x[j]; + } + x[i] /= augmented[i][i]; + } + + return x; +} + +function varModel(x: number[], y: number[], lag: number): { + rssX: number; + rssY: number; + aic: number; +} { + // Simplified VAR model calculation + const n = x.length - lag; + + // Build design matrix + const X = Array(n).fill(null).map(() => Array(2 * lag + 1).fill(1)); + const yX = Array(n).fill(0); + const yY = Array(n).fill(0); + + for (let t = 0; t < n; t++) { + yX[t] = x[t + lag]; + yY[t] = y[t + lag]; + + for (let l = 0; l < lag; l++) { + X[t][1 + l] = x[t + lag - 1 - l]; + X[t][1 + lag + l] = y[t + lag - 1 - l]; + } + } + + // Calculate 
residuals for both equations
  const residualsX = residuals(yX, X);
  const residualsY = residuals(yY, X);

  const rssX = residualsX.reduce((sum, r) => sum + r * r, 0);
  const rssY = residualsY.reduce((sum, r) => sum + r * r, 0);

  // AIC from the combined residual sum of squares of both equations.
  const k = 2 * lag + 1;
  const aic = n * Math.log(rssX + rssY) + 2 * k;

  return { rssX, rssY, aic };
}

function arModel(y: number[], lag: number): { rss: number } {
  // Univariate AR(lag) regression; returns only the residual sum of squares
  // (used as the restricted model in the Granger causality F-test).
  const n = y.length - lag;

  // Build design matrix: intercept column plus `lag` lagged-value columns.
  const X = Array(n).fill(null).map(() => Array(lag + 1).fill(1));
  const yVec = Array(n).fill(0);

  for (let t = 0; t < n; t++) {
    yVec[t] = y[t + lag];

    for (let l = 0; l < lag; l++) {
      X[t][1 + l] = y[t + lag - 1 - l];
    }
  }

  const res = residuals(yVec, X);
  const rss = res.reduce((sum, r) => sum + r * r, 0);

  return { rss };
}

function fCDF(f: number, df1: number, df2: number): number {
  // Approximation for F distribution CDF via the regularized incomplete
  // beta function: P(F <= f) = 1 - I_x(df2/2, df1/2) with x = df2/(df2 + df1*f).
  if (f <= 0) return 0;
  if (f === Infinity) return 1;

  const x = df2 / (df2 + df1 * f);
  return 1 - betaIncomplete(df2 / 2, df1 / 2, x);
}
diff --git a/libs/utils/src/calculations/index.ts b/libs/utils/src/calculations/index.ts
index 1a4874e..1a49a7d 100644
--- a/libs/utils/src/calculations/index.ts
+++ b/libs/utils/src/calculations/index.ts
@@ -1,166 +1,166 @@
-/**
- * Comprehensive Financial Calculations Library
- *
- * This module provides a complete set of financial calculations for trading and investment analysis.
- * Organized into logical categories for easy use and maintenance.
- */ - -// Core interfaces for financial data -export interface OHLCVData { - open: number; - high: number; - low: number; - close: number; - volume: number; - timestamp: Date; -} - -export interface PriceData { - price: number; - timestamp: Date; -} - -// Financial calculation result interfaces -export interface PortfolioMetrics { - totalValue: number; - totalReturn: number; - totalReturnPercent: number; - dailyReturn: number; - dailyReturnPercent: number; - maxDrawdown: number; - sharpeRatio: number; - beta: number; - alpha: number; - volatility: number; -} - -export interface RiskMetrics { - var95: number; // Value at Risk 95% - var99: number; // Value at Risk 99% - cvar95: number; // Conditional VaR 95% - maxDrawdown: number; - volatility: number; - downside_deviation: number; - calmar_ratio: number; - sortino_ratio: number; - beta: number; - alpha: number; - sharpeRatio: number; - treynorRatio: number; - trackingError: number; - informationRatio: number; -} - -export interface TechnicalIndicators { - sma: number[]; - ema: number[]; - rsi: number[]; - macd: { macd: number[], signal: number[], histogram: number[] }; - bollinger: { upper: number[], middle: number[], lower: number[] }; - atr: number[]; - stochastic: { k: number[], d: number[] }; - williams_r: number[]; - cci: number[]; - momentum: number[]; - roc: number[]; -} - -// Additional interfaces for new functionality -export interface TradeExecution { - entry: number; - exit: number; - peak?: number; - trough?: number; - volume: number; - timestamp: Date; -} - -export interface MarketData { - price: number; - volume: number; - timestamp: Date; - bid?: number; - ask?: number; - bidSize?: number; - askSize?: number; -} - -export interface BacktestResults { - trades: TradeExecution[]; - equityCurve: Array<{ value: number; date: Date }>; - - performance: PortfolioMetrics; - riskMetrics: RiskMetrics; - drawdownAnalysis: any; // Import from performance-metrics -} - -// Export all calculation functions -export * 
from './basic-calculations'; -export * from './technical-indicators'; -export * from './risk-metrics'; -export * from './portfolio-analytics'; -export * from './options-pricing'; -export * from './position-sizing'; -export * from './performance-metrics'; -export * from './market-statistics'; -export * from './volatility-models'; -export * from './correlation-analysis'; - -// Import specific functions for convenience functions -import { - sma, ema, rsi, macd, bollingerBands, atr, stochastic, - williamsR, cci, momentum, roc -} from './technical-indicators'; -import { calculateRiskMetrics } from './risk-metrics'; -import { calculateStrategyMetrics } from './performance-metrics'; - -// Convenience function to calculate all technical indicators at once -export function calculateAllTechnicalIndicators( - ohlcv: OHLCVData[], - periods: { sma?: number; ema?: number; rsi?: number; atr?: number } = {} -): TechnicalIndicators { - const { - sma: smaPeriod = 20, - ema: emaPeriod = 20, - rsi: rsiPeriod = 14, - atr: atrPeriod = 14 - } = periods; - - const closes = ohlcv.map(d => d.close); - - return { - sma: sma(closes, smaPeriod), - ema: ema(closes, emaPeriod), - rsi: rsi(closes, rsiPeriod), - macd: macd(closes), - bollinger: bollingerBands(closes), - atr: atr(ohlcv, atrPeriod), - stochastic: stochastic(ohlcv), - williams_r: williamsR(ohlcv), - cci: cci(ohlcv), - momentum: momentum(closes), - roc: roc(closes) - }; -} - -// Convenience function for comprehensive portfolio analysis -export function analyzePortfolio( - returns: number[], - equityCurve: Array<{ value: number; date: Date }>, - benchmarkReturns?: number[], - riskFreeRate: number = 0.02 -): { - performance: PortfolioMetrics; - risk: RiskMetrics; - trades?: any; - drawdown?: any; -} { - const performance = calculateStrategyMetrics(equityCurve, benchmarkReturns, riskFreeRate); - const equityValues = equityCurve.map(point => point.value); - const risk = calculateRiskMetrics(returns, equityValues, benchmarkReturns, 
riskFreeRate); - - return { - performance, - risk - }; -} +/** + * Comprehensive Financial Calculations Library + * + * This module provides a complete set of financial calculations for trading and investment analysis. + * Organized into logical categories for easy use and maintenance. + */ + +// Core interfaces for financial data +export interface OHLCVData { + open: number; + high: number; + low: number; + close: number; + volume: number; + timestamp: Date; +} + +export interface PriceData { + price: number; + timestamp: Date; +} + +// Financial calculation result interfaces +export interface PortfolioMetrics { + totalValue: number; + totalReturn: number; + totalReturnPercent: number; + dailyReturn: number; + dailyReturnPercent: number; + maxDrawdown: number; + sharpeRatio: number; + beta: number; + alpha: number; + volatility: number; +} + +export interface RiskMetrics { + var95: number; // Value at Risk 95% + var99: number; // Value at Risk 99% + cvar95: number; // Conditional VaR 95% + maxDrawdown: number; + volatility: number; + downside_deviation: number; + calmar_ratio: number; + sortino_ratio: number; + beta: number; + alpha: number; + sharpeRatio: number; + treynorRatio: number; + trackingError: number; + informationRatio: number; +} + +export interface TechnicalIndicators { + sma: number[]; + ema: number[]; + rsi: number[]; + macd: { macd: number[], signal: number[], histogram: number[] }; + bollinger: { upper: number[], middle: number[], lower: number[] }; + atr: number[]; + stochastic: { k: number[], d: number[] }; + williams_r: number[]; + cci: number[]; + momentum: number[]; + roc: number[]; +} + +// Additional interfaces for new functionality +export interface TradeExecution { + entry: number; + exit: number; + peak?: number; + trough?: number; + volume: number; + timestamp: Date; +} + +export interface MarketData { + price: number; + volume: number; + timestamp: Date; + bid?: number; + ask?: number; + bidSize?: number; + askSize?: number; +} + 
+export interface BacktestResults { + trades: TradeExecution[]; + equityCurve: Array<{ value: number; date: Date }>; + + performance: PortfolioMetrics; + riskMetrics: RiskMetrics; + drawdownAnalysis: any; // Import from performance-metrics +} + +// Export all calculation functions +export * from './basic-calculations'; +export * from './technical-indicators'; +export * from './risk-metrics'; +export * from './portfolio-analytics'; +export * from './options-pricing'; +export * from './position-sizing'; +export * from './performance-metrics'; +export * from './market-statistics'; +export * from './volatility-models'; +export * from './correlation-analysis'; + +// Import specific functions for convenience functions +import { + sma, ema, rsi, macd, bollingerBands, atr, stochastic, + williamsR, cci, momentum, roc +} from './technical-indicators'; +import { calculateRiskMetrics } from './risk-metrics'; +import { calculateStrategyMetrics } from './performance-metrics'; + +// Convenience function to calculate all technical indicators at once +export function calculateAllTechnicalIndicators( + ohlcv: OHLCVData[], + periods: { sma?: number; ema?: number; rsi?: number; atr?: number } = {} +): TechnicalIndicators { + const { + sma: smaPeriod = 20, + ema: emaPeriod = 20, + rsi: rsiPeriod = 14, + atr: atrPeriod = 14 + } = periods; + + const closes = ohlcv.map(d => d.close); + + return { + sma: sma(closes, smaPeriod), + ema: ema(closes, emaPeriod), + rsi: rsi(closes, rsiPeriod), + macd: macd(closes), + bollinger: bollingerBands(closes), + atr: atr(ohlcv, atrPeriod), + stochastic: stochastic(ohlcv), + williams_r: williamsR(ohlcv), + cci: cci(ohlcv), + momentum: momentum(closes), + roc: roc(closes) + }; +} + +// Convenience function for comprehensive portfolio analysis +export function analyzePortfolio( + returns: number[], + equityCurve: Array<{ value: number; date: Date }>, + benchmarkReturns?: number[], + riskFreeRate: number = 0.02 +): { + performance: PortfolioMetrics; + risk: 
RiskMetrics; + trades?: any; + drawdown?: any; +} { + const performance = calculateStrategyMetrics(equityCurve, benchmarkReturns, riskFreeRate); + const equityValues = equityCurve.map(point => point.value); + const risk = calculateRiskMetrics(returns, equityValues, benchmarkReturns, riskFreeRate); + + return { + performance, + risk + }; +} diff --git a/libs/utils/src/calculations/market-statistics.ts b/libs/utils/src/calculations/market-statistics.ts index 52a2843..18dd5f5 100644 --- a/libs/utils/src/calculations/market-statistics.ts +++ b/libs/utils/src/calculations/market-statistics.ts @@ -1,985 +1,985 @@ -/** - * Market Statistics and Microstructure Analysis - * Tools for analyzing market behavior, liquidity, and trading patterns - */ - -// Local interface definition to avoid circular dependency -interface OHLCVData { - open: number; - high: number; - low: number; - close: number; - volume: number; - timestamp: Date; -} - -export interface LiquidityMetrics { - bidAskSpread: number; - relativeSpread: number; - effectiveSpread: number; - priceImpact: number; - marketDepth: number; - turnoverRatio: number; - volumeWeightedSpread: number; -} - -export interface MarketMicrostructure { - tickSize: number; - averageTradeSize: number; - tradingFrequency: number; - marketImpactCoefficient: number; - informationShare: number; - orderImbalance: number; -} - -export interface TradingSessionStats { - openPrice: number; - closePrice: number; - highPrice: number; - lowPrice: number; - volume: number; - vwap: number; - numberOfTrades: number; - averageTradeSize: number; - volatility: number; -} - -export interface MarketRegime { - regime: 'trending' | 'ranging' | 'volatile' | 'quiet'; - confidence: number; - trendDirection?: 'up' | 'down'; - volatilityLevel: 'low' | 'medium' | 'high'; -} - -/** - * Volume Weighted Average Price (VWAP) - */ -export function VWAP(ohlcv: OHLCVData[]): number[] { - if (ohlcv.length === 0) return []; - - const vwap: number[] = []; - let 
cumulativeVolumePrice = 0; - let cumulativeVolume = 0; - - for (const candle of ohlcv) { - const typicalPrice = (candle.high + candle.low + candle.close) / 3; - cumulativeVolumePrice += typicalPrice * candle.volume; - cumulativeVolume += candle.volume; - - vwap.push(cumulativeVolume > 0 ? cumulativeVolumePrice / cumulativeVolume : typicalPrice); - } - - return vwap; -} - -/** - * Time Weighted Average Price (TWAP) - */ -export function TWAP(prices: number[], timeWeights?: number[]): number { - if (prices.length === 0) return 0; - - if (!timeWeights) { - return prices.reduce((sum, price) => sum + price, 0) / prices.length; - } - - if (prices.length !== timeWeights.length) { - throw new Error('Prices and time weights arrays must have the same length'); - } - - const totalWeight = timeWeights.reduce((sum, weight) => sum + weight, 0); - const weightedSum = prices.reduce((sum, price, index) => sum + price * timeWeights[index], 0); - - return totalWeight > 0 ? weightedSum / totalWeight : 0; -} - -/** - * market impact of trades - */ -export function MarketImpact( - trades: Array<{ price: number; volume: number; side: 'buy' | 'sell'; timestamp: Date }>, - benchmarkPrice: number -): { - temporaryImpact: number; - permanentImpact: number; - totalImpact: number; - priceImprovement: number; -} { - if (trades.length === 0) { - return { - temporaryImpact: 0, - permanentImpact: 0, - totalImpact: 0, - priceImprovement: 0 - }; - } - - const volumeWeightedPrice = trades.reduce((sum, trade) => sum + trade.price * trade.volume, 0) / - trades.reduce((sum, trade) => sum + trade.volume, 0); - - const totalImpact = (volumeWeightedPrice - benchmarkPrice) / benchmarkPrice; - - // Simplified impact calculation - const temporaryImpact = totalImpact * 0.6; // Temporary component - const permanentImpact = totalImpact * 0.4; // Permanent component - - const priceImprovement = trades.reduce((sum, trade) => { - const improvement = trade.side === 'buy' ? 
- Math.max(0, benchmarkPrice - trade.price) : - Math.max(0, trade.price - benchmarkPrice); - return sum + improvement * trade.volume; - }, 0) / trades.reduce((sum, trade) => sum + trade.volume, 0); - - return { - temporaryImpact, - permanentImpact, - totalImpact, - priceImprovement - }; -} - -/** - * liquidity metrics - */ -export function LiquidityMetrics( - ohlcv: OHLCVData[], - bidPrices: number[], - askPrices: number[], - bidSizes: number[], - askSizes: number[] -): LiquidityMetrics { - if (ohlcv.length === 0 || bidPrices.length === 0) { - return { - bidAskSpread: 0, - relativeSpread: 0, - effectiveSpread: 0, - priceImpact: 0, - marketDepth: 0, - turnoverRatio: 0, - volumeWeightedSpread: 0 - }; - } - - // Average bid-ask spread - const spreads = bidPrices.map((bid, index) => askPrices[index] - bid); - const bidAskSpread = spreads.reduce((sum, spread) => sum + spread, 0) / spreads.length; - - // Relative spread - const midPrices = bidPrices.map((bid, index) => (bid + askPrices[index]) / 2); - const averageMidPrice = midPrices.reduce((sum, mid) => sum + mid, 0) / midPrices.length; - const relativeSpread = averageMidPrice > 0 ? bidAskSpread / averageMidPrice : 0; - - // Market depth - const averageBidSize = bidSizes.reduce((sum, size) => sum + size, 0) / bidSizes.length; - const averageAskSize = askSizes.reduce((sum, size) => sum + size, 0) / askSizes.length; - const marketDepth = (averageBidSize + averageAskSize) / 2; - - // Turnover ratio - const averageVolume = ohlcv.reduce((sum, candle) => sum + candle.volume, 0) / ohlcv.length; - const averagePrice = ohlcv.reduce((sum, candle) => sum + candle.close, 0) / ohlcv.length; - const marketCap = averagePrice * 1000000; // Simplified market cap - const turnoverRatio = marketCap > 0 ? 
(averageVolume * averagePrice) / marketCap : 0; - - return { - bidAskSpread, - relativeSpread: relativeSpread * 100, // Convert to percentage - effectiveSpread: bidAskSpread * 0.8, // Simplified effective spread - priceImpact: relativeSpread * 2, // Simplified price impact - marketDepth, - turnoverRatio: turnoverRatio * 100, // Convert to percentage - volumeWeightedSpread: bidAskSpread // Simplified - }; -} - -/** - * Identify market regimes - */ -export function identifyMarketRegime( - ohlcv: OHLCVData[], - lookbackPeriod: number = 20 -): MarketRegime { - if (ohlcv.length < lookbackPeriod) { - return { - regime: 'quiet', - confidence: 0, - volatilityLevel: 'low' - }; - } - - const recentData = ohlcv.slice(-lookbackPeriod); - const prices = recentData.map(candle => candle.close); - const volumes = recentData.map(candle => candle.volume); - // returns and volatility - const returns = []; - for (let i = 1; i < prices.length; i++) { - returns.push((prices[i] - prices[i - 1]) / prices[i - 1]); - } - - const volatility = calculateVolatility(returns); - const averageVolume = volumes.reduce((sum, vol) => sum + vol, 0) / volumes.length; - - // Trend analysis - const firstPrice = prices[0]; - const lastPrice = prices[prices.length - 1]; - const trendStrength = Math.abs((lastPrice - firstPrice) / firstPrice); - - // Determine volatility level - let volatilityLevel: 'low' | 'medium' | 'high'; - if (volatility < 0.01) volatilityLevel = 'low'; - else if (volatility < 0.03) volatilityLevel = 'medium'; - else volatilityLevel = 'high'; - - // Determine regime - let regime: 'trending' | 'ranging' | 'volatile' | 'quiet'; - let confidence = 0; - let trendDirection: 'up' | 'down' | undefined; - - if (volatility < 0.005) { - regime = 'quiet'; - confidence = 0.8; - } else if (volatility > 0.04) { - regime = 'volatile'; - confidence = 0.7; - } else if (trendStrength > 0.05) { - regime = 'trending'; - trendDirection = lastPrice > firstPrice ? 
'up' : 'down'; - confidence = Math.min(0.9, trendStrength * 10); - } else { - regime = 'ranging'; - confidence = 0.6; - } - - return { - regime, - confidence, - trendDirection, - volatilityLevel - }; -} - -/** - * order book imbalance - */ -export function OrderBookImbalance( - bidPrices: number[], - askPrices: number[], - bidSizes: number[], - askSizes: number[], - levels: number = 5 -): number { - const levelsToAnalyze = Math.min(levels, bidPrices.length, askPrices.length); - - let totalBidVolume = 0; - let totalAskVolume = 0; - - for (let i = 0; i < levelsToAnalyze; i++) { - totalBidVolume += bidSizes[i]; - totalAskVolume += askSizes[i]; - } - - const totalVolume = totalBidVolume + totalAskVolume; - - if (totalVolume === 0) return 0; - - return (totalBidVolume - totalAskVolume) / totalVolume; -} - -/** - * intraday patterns - */ -export function IntradayPatterns( - ohlcv: OHLCVData[] -): { - hourlyReturns: { [hour: number]: number }; - hourlyVolatility: { [hour: number]: number }; - hourlyVolume: { [hour: number]: number }; - openingGap: number; - closingDrift: number; -} { - const hourlyData: { [hour: number]: { returns: number[]; volumes: number[] } } = {}; - - // Initialize hourly buckets - for (let hour = 0; hour < 24; hour++) { - hourlyData[hour] = { returns: [], volumes: [] }; - } - - // Aggregate data by hour - for (let i = 1; i < ohlcv.length; i++) { - const hour = ohlcv[i].timestamp.getHours(); - const return_ = (ohlcv[i].close - ohlcv[i - 1].close) / ohlcv[i - 1].close; - - hourlyData[hour].returns.push(return_); - hourlyData[hour].volumes.push(ohlcv[i].volume); - } - - // statistics for each hour - const hourlyReturns: { [hour: number]: number } = {}; - const hourlyVolatility: { [hour: number]: number } = {}; - const hourlyVolume: { [hour: number]: number } = {}; - - for (let hour = 0; hour < 24; hour++) { - const data = hourlyData[hour]; - hourlyReturns[hour] = data.returns.length > 0 ? 
- data.returns.reduce((sum, ret) => sum + ret, 0) / data.returns.length : 0; - - hourlyVolatility[hour] = calculateVolatility(data.returns); - - hourlyVolume[hour] = data.volumes.length > 0 ? - data.volumes.reduce((sum, vol) => sum + vol, 0) / data.volumes.length : 0; - } - // opening gap and closing drift - const openingGap = ohlcv.length > 1 ? - (ohlcv[1].open - ohlcv[0].close) / ohlcv[0].close : 0; - - const lastCandle = ohlcv[ohlcv.length - 1]; - const closingDrift = (lastCandle.close - lastCandle.open) / lastCandle.open; - - return { - hourlyReturns, - hourlyVolatility, - hourlyVolume, - openingGap, - closingDrift - }; -} - -/** - * price discovery metrics - */ -export function PriceDiscovery( - prices1: number[], // Prices from market 1 - prices2: number[] // Prices from market 2 -): { - informationShare1: number; - informationShare2: number; - priceLeadLag: number; // Positive if market 1 leads - cointegrationStrength: number; -} { - if (prices1.length !== prices2.length || prices1.length < 2) { - return { - informationShare1: 0.5, - informationShare2: 0.5, - priceLeadLag: 0, - cointegrationStrength: 0 - }; - } - - // returns - const returns1 = []; - const returns2 = []; - - for (let i = 1; i < prices1.length; i++) { - returns1.push((prices1[i] - prices1[i - 1]) / prices1[i - 1]); - returns2.push((prices2[i] - prices2[i - 1]) / prices2[i - 1]); - } - // correlations with lags - const correlation0 = calculateCorrelation(returns1, returns2); - const correlation1 = returns1.length > 1 ? - calculateCorrelation(returns1.slice(1), returns2.slice(0, -1)) : 0; - const correlationMinus1 = returns1.length > 1 ? 
- calculateCorrelation(returns1.slice(0, -1), returns2.slice(1)) : 0; - - // Price lead-lag (simplified) - const priceLeadLag = correlation1 - correlationMinus1; - - // Information shares (simplified Hasbrouck methodology) - const variance1 = calculateVariance(returns1); - const variance2 = calculateVariance(returns2); - const covariance = calculateCovariance(returns1, returns2); - - const totalVariance = variance1 + variance2 + 2 * covariance; - const informationShare1 = totalVariance > 0 ? (variance1 + covariance) / totalVariance : 0.5; - const informationShare2 = 1 - informationShare1; - - // Cointegration strength (simplified) - const cointegrationStrength = Math.abs(correlation0); - - return { - informationShare1, - informationShare2, - priceLeadLag, - cointegrationStrength - }; -} - -/** - * market stress indicators - */ -export function MarketStress( - ohlcv: OHLCVData[], - lookbackPeriod: number = 20 -): { - stressLevel: 'low' | 'medium' | 'high' | 'extreme'; - volatilityStress: number; - liquidityStress: number; - correlationStress: number; - overallStress: number; -} { - if (ohlcv.length < lookbackPeriod) { - return { - stressLevel: 'low', - volatilityStress: 0, - liquidityStress: 0, - correlationStress: 0, - overallStress: 0 - }; - } - - const recentData = ohlcv.slice(-lookbackPeriod); - const returns = []; - const volumes = []; - - for (let i = 1; i < recentData.length; i++) { - returns.push((recentData[i].close - recentData[i - 1].close) / recentData[i - 1].close); - volumes.push(recentData[i].volume); - } - // Volatility stress - const volatility = calculateVolatility(returns); - const volatilityStress = Math.min(1, volatility / 0.05); // Normalize to 5% daily vol - - // Liquidity stress (volume-based) - const averageVolume = volumes.reduce((sum, vol) => sum + vol, 0) / volumes.length; - const volumeVariability = calculateVolatility(volumes.map(vol => vol / averageVolume)); - const liquidityStress = Math.min(1, volumeVariability); - - // Correlation 
stress (simplified - would need multiple assets) - const correlationStress = 0.3; // Placeholder - - // Overall stress - const overallStress = (volatilityStress * 0.4 + liquidityStress * 0.3 + correlationStress * 0.3); - - let stressLevel: 'low' | 'medium' | 'high' | 'extreme'; - if (overallStress < 0.25) stressLevel = 'low'; - else if (overallStress < 0.5) stressLevel = 'medium'; - else if (overallStress < 0.75) stressLevel = 'high'; - else stressLevel = 'extreme'; - - return { - stressLevel, - volatilityStress, - liquidityStress, - correlationStress, - overallStress - }; -} - -/** - * realized spread - */ -export function RealizedSpread( - trades: Array<{ price: number; side: 'buy' | 'sell'; timestamp: Date }>, - midPrices: number[], - timeWindow: number = 5 // minutes -): number { - if (trades.length === 0 || midPrices.length === 0) return 0; - - let totalSpread = 0; - let count = 0; - - for (const trade of trades) { - // Find corresponding mid price - const midPrice = midPrices[0]; // Simplified - should match by timestamp - - const spread = trade.side === 'buy' ? - 2 * (trade.price - midPrice) : - 2 * (midPrice - trade.price); - - totalSpread += spread; - count++; - } - - return count > 0 ? 
totalSpread / count : 0; -} - -/** - * implementation shortfall - */ -export function ImplementationShortfall( - decisionPrice: number, - executionPrices: number[], - volumes: number[], - commissions: number[], - marketImpact: number[] -): { - totalShortfall: number; - delayComponent: number; - marketImpactComponent: number; - timingComponent: number; - commissionComponent: number; -} { - if (executionPrices.length !== volumes.length) { - throw new Error('Execution prices and volumes must have same length'); - } - - const totalVolume = volumes.reduce((sum, vol) => sum + vol, 0); - const weightedExecutionPrice = executionPrices.reduce((sum, price, i) => - sum + price * volumes[i], 0) / totalVolume; - - const totalCommissions = commissions.reduce((sum, comm) => sum + comm, 0); - const totalMarketImpact = marketImpact.reduce((sum, impact, i) => - sum + impact * volumes[i], 0); - - const delayComponent = weightedExecutionPrice - decisionPrice; - const marketImpactComponent = totalMarketImpact / totalVolume; - const timingComponent = 0; // Simplified - would need benchmark price evolution - const commissionComponent = totalCommissions / totalVolume; - - const totalShortfall = delayComponent + marketImpactComponent + - timingComponent + commissionComponent; - - return { - totalShortfall, - delayComponent, - marketImpactComponent, - timingComponent, - commissionComponent - }; -} - -/** - * Amihud Illiquidity Measure (price impact per unit of volume) - */ -export function amihudIlliquidity( - ohlcv: OHLCVData[], - lookbackPeriod: number = 252 -): number { - if (ohlcv.length < lookbackPeriod) return 0; - - const recentData = ohlcv.slice(-lookbackPeriod); - let illiquiditySum = 0; - let validDays = 0; - - for (const candle of recentData) { - if (candle.volume > 0) { - const dailyReturn = Math.abs((candle.close - candle.open) / candle.open); - const dollarVolume = candle.volume * candle.close; - - if (dollarVolume > 0) { - illiquiditySum += dailyReturn / dollarVolume; - 
validDays++; - } - } - } - - return validDays > 0 ? (illiquiditySum / validDays) * 1000000 : 0; // Scale to millions -} - -/** - * Roll's Spread Estimator (effective spread from serial covariance) - */ -export function rollSpreadEstimator(prices: number[]): number { - if (prices.length < 3) return 0; - - // Calculate price changes - const priceChanges: number[] = []; - for (let i = 1; i < prices.length; i++) { - priceChanges.push(prices[i] - prices[i - 1]); - } - - // Calculate serial covariance - let covariance = 0; - for (let i = 1; i < priceChanges.length; i++) { - covariance += priceChanges[i] * priceChanges[i - 1]; - } - covariance /= (priceChanges.length - 1); - - // Roll's estimator: spread = 2 * sqrt(-covariance) - const spread = covariance < 0 ? 2 * Math.sqrt(-covariance) : 0; - - return spread; -} - -/** - * Kyle's Lambda (price impact coefficient) - */ -export function kyleLambda( - priceChanges: number[], - orderFlow: number[] // Signed order flow (positive for buys, negative for sells) -): number { - if (priceChanges.length !== orderFlow.length || priceChanges.length < 2) return 0; - - // Calculate regression: priceChange = lambda * orderFlow + error - const n = priceChanges.length; - const meanPrice = priceChanges.reduce((sum, p) => sum + p, 0) / n; - const meanFlow = orderFlow.reduce((sum, f) => sum + f, 0) / n; - - let numerator = 0; - let denominator = 0; - - for (let i = 0; i < n; i++) { - const priceDeviation = priceChanges[i] - meanPrice; - const flowDeviation = orderFlow[i] - meanFlow; - - numerator += priceDeviation * flowDeviation; - denominator += flowDeviation * flowDeviation; - } - - return denominator > 0 ? 
numerator / denominator : 0; -} - -/** - * Probability of Informed Trading (PIN) - simplified version - */ -export function probabilityInformedTrading( - buyVolumes: number[], - sellVolumes: number[], - period: number = 20 -): number { - if (buyVolumes.length !== sellVolumes.length || buyVolumes.length < period) return 0; - - const recentBuys = buyVolumes.slice(-period); - const recentSells = sellVolumes.slice(-period); - - let totalImbalance = 0; - let totalVolume = 0; - - for (let i = 0; i < period; i++) { - const imbalance = Math.abs(recentBuys[i] - recentSells[i]); - const volume = recentBuys[i] + recentSells[i]; - - totalImbalance += imbalance; - totalVolume += volume; - } - - // Simplified PIN estimate based on order imbalance - return totalVolume > 0 ? totalImbalance / totalVolume : 0; -} - -/** - * Herfindahl-Hirschman Index for Volume Concentration - */ -export function volumeConcentrationHHI( - exchanges: Array<{ name: string; volume: number }> -): number { - if (exchanges.length === 0) return 0; - - const totalVolume = exchanges.reduce((sum, exchange) => sum + exchange.volume, 0); - - if (totalVolume === 0) return 0; - - let hhi = 0; - for (const exchange of exchanges) { - const marketShare = exchange.volume / totalVolume; - hhi += marketShare * marketShare; - } - - return hhi * 10000; // Scale to 0-10000 range -} -/** - * Volume Profile - */ -export function volumeProfile( - ohlcv: OHLCVData[], - priceLevels: number -): { [price: number]: number } { - const profile: { [price: number]: number } = {}; - - if (ohlcv.length === 0) return profile; - - const minPrice = Math.min(...ohlcv.map(candle => candle.low)); - const maxPrice = Math.max(...ohlcv.map(candle => candle.high)); - const priceRange = maxPrice - minPrice; - const priceIncrement = priceRange / priceLevels; - - for (let i = 0; i < priceLevels; i++) { - const priceLevel = minPrice + i * priceIncrement; - profile[priceLevel] = 0; - } - - for (const candle of ohlcv) { - const typicalPrice = 
(candle.high + candle.low + candle.close) / 3; - const priceLevel = minPrice + Math.floor((typicalPrice - minPrice) / priceIncrement) * priceIncrement; - if (profile[priceLevel] !== undefined) { - profile[priceLevel] += candle.volume; - } - } - - return profile; -} - -/** - * Delta Neutral Hedging Ratio - */ -export function deltaNeutralHedgingRatio( - optionDelta: number -): number { - return -optionDelta; -} - -/** - * Gamma Scalping Range - */ -export function gammaScalpingRange( - gamma: number, - theta: number, - timeIncrement: number -): number { - return Math.sqrt(2 * Math.abs(theta) * timeIncrement / gamma); -} - -/** - * Optimal Order Size (based on market impact) - */ -export function optimalOrderSize( - alpha: number, - lambda: number -): number { - return alpha / (2 * lambda); -} - -/** - * Adverse Selection Component of the Spread - */ -export function adverseSelectionComponent( - probabilityOfInformedTrader: number, - spread: number -): number { - return probabilityOfInformedTrader * spread; -} - -/** - * Inventory Risk Component of the Spread - */ -export function inventoryRiskComponent( - inventoryHoldingCost: number, - orderArrivalRate: number -): number { - return inventoryHoldingCost * Math.sqrt(orderArrivalRate); -} - -/** - * Quote Age - */ -export function quoteAge( - lastUpdate: Date -): number { - return Date.now() - lastUpdate.getTime(); -} - -/** - * Trade Classification (Lee-Ready algorithm) - */ -export function tradeClassification( - tradePrice: number, - bidPrice: number, - askPrice: number, - previousTradePrice: number -): 'buy' | 'sell' | 'unknown' { - if (tradePrice > askPrice) { - return 'buy'; - } else if (tradePrice < bidPrice) { - return 'sell'; - } else if (tradePrice >= previousTradePrice) { - return 'buy'; - } else { - return 'sell'; - } -} - -/** - * Tick Rule - */ -export function tickRule( - tradePrice: number, - previousTradePrice: number -): 'buy' | 'sell' | 'unknown' { - if (tradePrice > previousTradePrice) { - return 
'buy'; - } else if (tradePrice < previousTradePrice) { - return 'sell'; - } else { - return 'unknown'; - } -} - -/** - * Amihud's Lambda Variation with High-Frequency Data - */ -export function amihudIlliquidityHFT( - priceChanges: number[], - dollarVolumes: number[], - timeDeltas: number[] -): number { - let illiquiditySum = 0; - let validTrades = 0; - - for (let i = 0; i < priceChanges.length; i++) { - if (dollarVolumes[i] > 0 && timeDeltas[i] > 0) { - illiquiditySum += Math.abs(priceChanges[i]) / (dollarVolumes[i] * timeDeltas[i]); - validTrades++; - } - } - - return validTrades > 0 ? illiquiditySum / validTrades : 0; -} - -/** - * Garman-Klass Volatility - */ -export function garmanKlassVolatility( - openPrices: number[], - highPrices: number[], - lowPrices: number[], - closePrices: number[] -): number { - if (openPrices.length !== highPrices.length || openPrices.length !== lowPrices.length || openPrices.length !== closePrices.length || openPrices.length < 2) return 0; - - let sumSquaredTerm1 = 0; - let sumSquaredTerm2 = 0; - let sumSquaredTerm3 = 0; - - for (let i = 0; i < openPrices.length; i++) { - const logHO = Math.log(highPrices[i] / openPrices[i]); - const logLO = Math.log(lowPrices[i] / openPrices[i]); - const logCO = Math.log(closePrices[i] / openPrices[i]); - - sumSquaredTerm1 += 0.5 * (logHO * logHO + logLO * logLO); - sumSquaredTerm2 += - (2 * Math.log(2) - 1) * (logCO * logCO); - } - - const garmanKlassVariance = (1 / openPrices.length) * (sumSquaredTerm1 + sumSquaredTerm2); - return Math.sqrt(garmanKlassVariance); -} - -/** - * Yang-Zhang Volatility - */ -export function yangZhangVolatility( - openPrices: number[], - highPrices: number[], - lowPrices: number[], - closePrices: number[], - previousClosePrices: number[] -): number { - if (openPrices.length !== highPrices.length || openPrices.length !== lowPrices.length || openPrices.length !== closePrices.length || openPrices.length !== previousClosePrices.length || openPrices.length < 2) return 0; - 
- const k = 0.34 / (1.34 + (openPrices.length + 1) / (previousClosePrices.length - 1)); - - let sumSquaredTerm1 = 0; - let sumSquaredTerm2 = 0; - let sumSquaredTerm3 = 0; - - for (let i = 0; i < openPrices.length; i++) { - const overnightReturn = Math.log(openPrices[i] / previousClosePrices[i]); - const openToHigh = Math.log(highPrices[i] / openPrices[i]); - const openToLow = Math.log(lowPrices[i] / openPrices[i]); - const closeToOpen = Math.log(closePrices[i] / openPrices[i]); - - sumSquaredTerm1 += overnightReturn * overnightReturn; - sumSquaredTerm2 += openToHigh * openToHigh; - sumSquaredTerm3 += openToLow * openToLow; - } - - const variance = sumSquaredTerm1 + k * sumSquaredTerm2 + (1 - k) * sumSquaredTerm3; - return Math.sqrt(variance); -} - -/** - * Volume Order Imbalance (VOI) - */ -export function volumeOrderImbalance( - buyVolumes: number[], - sellVolumes: number[] -): number[] { - if (buyVolumes.length !== sellVolumes.length) return []; - - const voi: number[] = []; - for (let i = 0; i < buyVolumes.length; i++) { - voi.push(buyVolumes[i] - sellVolumes[i]); - } - return voi; -} - -/** - * Cumulative Volume Delta (CVD) - */ -export function cumulativeVolumeDelta( - buyVolumes: number[], - sellVolumes: number[] -): number[] { - if (buyVolumes.length !== sellVolumes.length) return []; - - const cvd: number[] = []; - let cumulativeDelta = 0; - for (let i = 0; i < buyVolumes.length; i++) { - cumulativeDelta += buyVolumes[i] - sellVolumes[i]; - cvd.push(cumulativeDelta); - } - return cvd; -} - -/** - * Market Order Ratio - */ -export function marketOrderRatio( - marketOrders: number[], - limitOrders: number[] -): number[] { - if (marketOrders.length !== limitOrders.length) return []; - - const ratios: number[] = []; - for (let i = 0; i < marketOrders.length; i++) { - const totalOrders = marketOrders[i] + limitOrders[i]; - ratios.push(totalOrders > 0 ? 
marketOrders[i] / totalOrders : 0); - } - return ratios; -} - -/** - * Helper function to calculate the average of an array of numbers - */ - -function average(arr: number[]): number { - if (arr.length === 0) return 0; - return arr.reduce((a, b) => a + b, 0) / arr.length; -} - -function calculateVolatility(returns: number[]): number { - if (returns.length < 2) return 0; - - const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1); - - return Math.sqrt(variance); -} - -function calculateCorrelation(x: number[], y: number[]): number { - if (x.length !== y.length || x.length < 2) return 0; - - const n = x.length; - const meanX = x.reduce((sum, val) => sum + val, 0) / n; - const meanY = y.reduce((sum, val) => sum + val, 0) / n; - - let numerator = 0; - let sumXSquared = 0; - let sumYSquared = 0; - - for (let i = 0; i < n; i++) { - const xDiff = x[i] - meanX; - const yDiff = y[i] - meanY; - - numerator += xDiff * yDiff; - sumXSquared += xDiff * xDiff; - sumYSquared += yDiff * yDiff; - } - - const denominator = Math.sqrt(sumXSquared * sumYSquared); - - return denominator > 0 ? 
numerator / denominator : 0; -} - -function calculateVariance(values: number[]): number { - if (values.length < 2) return 0; - - const mean = values.reduce((sum, val) => sum + val, 0) / values.length; - return values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / (values.length - 1); -} - -function calculateCovariance(x: number[], y: number[]): number { - if (x.length !== y.length || x.length < 2) return 0; - - const n = x.length; - const meanX = x.reduce((sum, val) => sum + val, 0) / n; - const meanY = y.reduce((sum, val) => sum + val, 0) / n; - - return x.reduce((sum, val, i) => sum + (val - meanX) * (y[i] - meanY), 0) / (n - 1); -} +/** + * Market Statistics and Microstructure Analysis + * Tools for analyzing market behavior, liquidity, and trading patterns + */ + +// Local interface definition to avoid circular dependency +interface OHLCVData { + open: number; + high: number; + low: number; + close: number; + volume: number; + timestamp: Date; +} + +export interface LiquidityMetrics { + bidAskSpread: number; + relativeSpread: number; + effectiveSpread: number; + priceImpact: number; + marketDepth: number; + turnoverRatio: number; + volumeWeightedSpread: number; +} + +export interface MarketMicrostructure { + tickSize: number; + averageTradeSize: number; + tradingFrequency: number; + marketImpactCoefficient: number; + informationShare: number; + orderImbalance: number; +} + +export interface TradingSessionStats { + openPrice: number; + closePrice: number; + highPrice: number; + lowPrice: number; + volume: number; + vwap: number; + numberOfTrades: number; + averageTradeSize: number; + volatility: number; +} + +export interface MarketRegime { + regime: 'trending' | 'ranging' | 'volatile' | 'quiet'; + confidence: number; + trendDirection?: 'up' | 'down'; + volatilityLevel: 'low' | 'medium' | 'high'; +} + +/** + * Volume Weighted Average Price (VWAP) + */ +export function VWAP(ohlcv: OHLCVData[]): number[] { + if (ohlcv.length === 0) return []; + + const 
vwap: number[] = []; + let cumulativeVolumePrice = 0; + let cumulativeVolume = 0; + + for (const candle of ohlcv) { + const typicalPrice = (candle.high + candle.low + candle.close) / 3; + cumulativeVolumePrice += typicalPrice * candle.volume; + cumulativeVolume += candle.volume; + + vwap.push(cumulativeVolume > 0 ? cumulativeVolumePrice / cumulativeVolume : typicalPrice); + } + + return vwap; +} + +/** + * Time Weighted Average Price (TWAP) + */ +export function TWAP(prices: number[], timeWeights?: number[]): number { + if (prices.length === 0) return 0; + + if (!timeWeights) { + return prices.reduce((sum, price) => sum + price, 0) / prices.length; + } + + if (prices.length !== timeWeights.length) { + throw new Error('Prices and time weights arrays must have the same length'); + } + + const totalWeight = timeWeights.reduce((sum, weight) => sum + weight, 0); + const weightedSum = prices.reduce((sum, price, index) => sum + price * timeWeights[index], 0); + + return totalWeight > 0 ? weightedSum / totalWeight : 0; +} + +/** + * market impact of trades + */ +export function MarketImpact( + trades: Array<{ price: number; volume: number; side: 'buy' | 'sell'; timestamp: Date }>, + benchmarkPrice: number +): { + temporaryImpact: number; + permanentImpact: number; + totalImpact: number; + priceImprovement: number; +} { + if (trades.length === 0) { + return { + temporaryImpact: 0, + permanentImpact: 0, + totalImpact: 0, + priceImprovement: 0 + }; + } + + const volumeWeightedPrice = trades.reduce((sum, trade) => sum + trade.price * trade.volume, 0) / + trades.reduce((sum, trade) => sum + trade.volume, 0); + + const totalImpact = (volumeWeightedPrice - benchmarkPrice) / benchmarkPrice; + + // Simplified impact calculation + const temporaryImpact = totalImpact * 0.6; // Temporary component + const permanentImpact = totalImpact * 0.4; // Permanent component + + const priceImprovement = trades.reduce((sum, trade) => { + const improvement = trade.side === 'buy' ? 
+ Math.max(0, benchmarkPrice - trade.price) : + Math.max(0, trade.price - benchmarkPrice); + return sum + improvement * trade.volume; + }, 0) / trades.reduce((sum, trade) => sum + trade.volume, 0); + + return { + temporaryImpact, + permanentImpact, + totalImpact, + priceImprovement + }; +} + +/** + * liquidity metrics + */ +export function LiquidityMetrics( + ohlcv: OHLCVData[], + bidPrices: number[], + askPrices: number[], + bidSizes: number[], + askSizes: number[] +): LiquidityMetrics { + if (ohlcv.length === 0 || bidPrices.length === 0) { + return { + bidAskSpread: 0, + relativeSpread: 0, + effectiveSpread: 0, + priceImpact: 0, + marketDepth: 0, + turnoverRatio: 0, + volumeWeightedSpread: 0 + }; + } + + // Average bid-ask spread + const spreads = bidPrices.map((bid, index) => askPrices[index] - bid); + const bidAskSpread = spreads.reduce((sum, spread) => sum + spread, 0) / spreads.length; + + // Relative spread + const midPrices = bidPrices.map((bid, index) => (bid + askPrices[index]) / 2); + const averageMidPrice = midPrices.reduce((sum, mid) => sum + mid, 0) / midPrices.length; + const relativeSpread = averageMidPrice > 0 ? bidAskSpread / averageMidPrice : 0; + + // Market depth + const averageBidSize = bidSizes.reduce((sum, size) => sum + size, 0) / bidSizes.length; + const averageAskSize = askSizes.reduce((sum, size) => sum + size, 0) / askSizes.length; + const marketDepth = (averageBidSize + averageAskSize) / 2; + + // Turnover ratio + const averageVolume = ohlcv.reduce((sum, candle) => sum + candle.volume, 0) / ohlcv.length; + const averagePrice = ohlcv.reduce((sum, candle) => sum + candle.close, 0) / ohlcv.length; + const marketCap = averagePrice * 1000000; // Simplified market cap + const turnoverRatio = marketCap > 0 ? 
(averageVolume * averagePrice) / marketCap : 0; + + return { + bidAskSpread, + relativeSpread: relativeSpread * 100, // Convert to percentage + effectiveSpread: bidAskSpread * 0.8, // Simplified effective spread + priceImpact: relativeSpread * 2, // Simplified price impact + marketDepth, + turnoverRatio: turnoverRatio * 100, // Convert to percentage + volumeWeightedSpread: bidAskSpread // Simplified + }; +} + +/** + * Identify market regimes + */ +export function identifyMarketRegime( + ohlcv: OHLCVData[], + lookbackPeriod: number = 20 +): MarketRegime { + if (ohlcv.length < lookbackPeriod) { + return { + regime: 'quiet', + confidence: 0, + volatilityLevel: 'low' + }; + } + + const recentData = ohlcv.slice(-lookbackPeriod); + const prices = recentData.map(candle => candle.close); + const volumes = recentData.map(candle => candle.volume); + // returns and volatility + const returns = []; + for (let i = 1; i < prices.length; i++) { + returns.push((prices[i] - prices[i - 1]) / prices[i - 1]); + } + + const volatility = calculateVolatility(returns); + const averageVolume = volumes.reduce((sum, vol) => sum + vol, 0) / volumes.length; + + // Trend analysis + const firstPrice = prices[0]; + const lastPrice = prices[prices.length - 1]; + const trendStrength = Math.abs((lastPrice - firstPrice) / firstPrice); + + // Determine volatility level + let volatilityLevel: 'low' | 'medium' | 'high'; + if (volatility < 0.01) volatilityLevel = 'low'; + else if (volatility < 0.03) volatilityLevel = 'medium'; + else volatilityLevel = 'high'; + + // Determine regime + let regime: 'trending' | 'ranging' | 'volatile' | 'quiet'; + let confidence = 0; + let trendDirection: 'up' | 'down' | undefined; + + if (volatility < 0.005) { + regime = 'quiet'; + confidence = 0.8; + } else if (volatility > 0.04) { + regime = 'volatile'; + confidence = 0.7; + } else if (trendStrength > 0.05) { + regime = 'trending'; + trendDirection = lastPrice > firstPrice ? 
'up' : 'down'; + confidence = Math.min(0.9, trendStrength * 10); + } else { + regime = 'ranging'; + confidence = 0.6; + } + + return { + regime, + confidence, + trendDirection, + volatilityLevel + }; +} + +/** + * order book imbalance + */ +export function OrderBookImbalance( + bidPrices: number[], + askPrices: number[], + bidSizes: number[], + askSizes: number[], + levels: number = 5 +): number { + const levelsToAnalyze = Math.min(levels, bidPrices.length, askPrices.length); + + let totalBidVolume = 0; + let totalAskVolume = 0; + + for (let i = 0; i < levelsToAnalyze; i++) { + totalBidVolume += bidSizes[i]; + totalAskVolume += askSizes[i]; + } + + const totalVolume = totalBidVolume + totalAskVolume; + + if (totalVolume === 0) return 0; + + return (totalBidVolume - totalAskVolume) / totalVolume; +} + +/** + * intraday patterns + */ +export function IntradayPatterns( + ohlcv: OHLCVData[] +): { + hourlyReturns: { [hour: number]: number }; + hourlyVolatility: { [hour: number]: number }; + hourlyVolume: { [hour: number]: number }; + openingGap: number; + closingDrift: number; +} { + const hourlyData: { [hour: number]: { returns: number[]; volumes: number[] } } = {}; + + // Initialize hourly buckets + for (let hour = 0; hour < 24; hour++) { + hourlyData[hour] = { returns: [], volumes: [] }; + } + + // Aggregate data by hour + for (let i = 1; i < ohlcv.length; i++) { + const hour = ohlcv[i].timestamp.getHours(); + const return_ = (ohlcv[i].close - ohlcv[i - 1].close) / ohlcv[i - 1].close; + + hourlyData[hour].returns.push(return_); + hourlyData[hour].volumes.push(ohlcv[i].volume); + } + + // statistics for each hour + const hourlyReturns: { [hour: number]: number } = {}; + const hourlyVolatility: { [hour: number]: number } = {}; + const hourlyVolume: { [hour: number]: number } = {}; + + for (let hour = 0; hour < 24; hour++) { + const data = hourlyData[hour]; + hourlyReturns[hour] = data.returns.length > 0 ? 
+ data.returns.reduce((sum, ret) => sum + ret, 0) / data.returns.length : 0; + + hourlyVolatility[hour] = calculateVolatility(data.returns); + + hourlyVolume[hour] = data.volumes.length > 0 ? + data.volumes.reduce((sum, vol) => sum + vol, 0) / data.volumes.length : 0; + } + // opening gap and closing drift + const openingGap = ohlcv.length > 1 ? + (ohlcv[1].open - ohlcv[0].close) / ohlcv[0].close : 0; + + const lastCandle = ohlcv[ohlcv.length - 1]; + const closingDrift = (lastCandle.close - lastCandle.open) / lastCandle.open; + + return { + hourlyReturns, + hourlyVolatility, + hourlyVolume, + openingGap, + closingDrift + }; +} + +/** + * price discovery metrics + */ +export function PriceDiscovery( + prices1: number[], // Prices from market 1 + prices2: number[] // Prices from market 2 +): { + informationShare1: number; + informationShare2: number; + priceLeadLag: number; // Positive if market 1 leads + cointegrationStrength: number; +} { + if (prices1.length !== prices2.length || prices1.length < 2) { + return { + informationShare1: 0.5, + informationShare2: 0.5, + priceLeadLag: 0, + cointegrationStrength: 0 + }; + } + + // returns + const returns1 = []; + const returns2 = []; + + for (let i = 1; i < prices1.length; i++) { + returns1.push((prices1[i] - prices1[i - 1]) / prices1[i - 1]); + returns2.push((prices2[i] - prices2[i - 1]) / prices2[i - 1]); + } + // correlations with lags + const correlation0 = calculateCorrelation(returns1, returns2); + const correlation1 = returns1.length > 1 ? + calculateCorrelation(returns1.slice(1), returns2.slice(0, -1)) : 0; + const correlationMinus1 = returns1.length > 1 ? 
+ calculateCorrelation(returns1.slice(0, -1), returns2.slice(1)) : 0; + + // Price lead-lag (simplified) + const priceLeadLag = correlation1 - correlationMinus1; + + // Information shares (simplified Hasbrouck methodology) + const variance1 = calculateVariance(returns1); + const variance2 = calculateVariance(returns2); + const covariance = calculateCovariance(returns1, returns2); + + const totalVariance = variance1 + variance2 + 2 * covariance; + const informationShare1 = totalVariance > 0 ? (variance1 + covariance) / totalVariance : 0.5; + const informationShare2 = 1 - informationShare1; + + // Cointegration strength (simplified) + const cointegrationStrength = Math.abs(correlation0); + + return { + informationShare1, + informationShare2, + priceLeadLag, + cointegrationStrength + }; +} + +/** + * market stress indicators + */ +export function MarketStress( + ohlcv: OHLCVData[], + lookbackPeriod: number = 20 +): { + stressLevel: 'low' | 'medium' | 'high' | 'extreme'; + volatilityStress: number; + liquidityStress: number; + correlationStress: number; + overallStress: number; +} { + if (ohlcv.length < lookbackPeriod) { + return { + stressLevel: 'low', + volatilityStress: 0, + liquidityStress: 0, + correlationStress: 0, + overallStress: 0 + }; + } + + const recentData = ohlcv.slice(-lookbackPeriod); + const returns = []; + const volumes = []; + + for (let i = 1; i < recentData.length; i++) { + returns.push((recentData[i].close - recentData[i - 1].close) / recentData[i - 1].close); + volumes.push(recentData[i].volume); + } + // Volatility stress + const volatility = calculateVolatility(returns); + const volatilityStress = Math.min(1, volatility / 0.05); // Normalize to 5% daily vol + + // Liquidity stress (volume-based) + const averageVolume = volumes.reduce((sum, vol) => sum + vol, 0) / volumes.length; + const volumeVariability = calculateVolatility(volumes.map(vol => vol / averageVolume)); + const liquidityStress = Math.min(1, volumeVariability); + + // Correlation 
stress (simplified - would need multiple assets) + const correlationStress = 0.3; // Placeholder + + // Overall stress + const overallStress = (volatilityStress * 0.4 + liquidityStress * 0.3 + correlationStress * 0.3); + + let stressLevel: 'low' | 'medium' | 'high' | 'extreme'; + if (overallStress < 0.25) stressLevel = 'low'; + else if (overallStress < 0.5) stressLevel = 'medium'; + else if (overallStress < 0.75) stressLevel = 'high'; + else stressLevel = 'extreme'; + + return { + stressLevel, + volatilityStress, + liquidityStress, + correlationStress, + overallStress + }; +} + +/** + * realized spread + */ +export function RealizedSpread( + trades: Array<{ price: number; side: 'buy' | 'sell'; timestamp: Date }>, + midPrices: number[], + timeWindow: number = 5 // minutes +): number { + if (trades.length === 0 || midPrices.length === 0) return 0; + + let totalSpread = 0; + let count = 0; + + for (const trade of trades) { + // Find corresponding mid price + const midPrice = midPrices[0]; // Simplified - should match by timestamp + + const spread = trade.side === 'buy' ? + 2 * (trade.price - midPrice) : + 2 * (midPrice - trade.price); + + totalSpread += spread; + count++; + } + + return count > 0 ? 
totalSpread / count : 0; +} + +/** + * implementation shortfall + */ +export function ImplementationShortfall( + decisionPrice: number, + executionPrices: number[], + volumes: number[], + commissions: number[], + marketImpact: number[] +): { + totalShortfall: number; + delayComponent: number; + marketImpactComponent: number; + timingComponent: number; + commissionComponent: number; +} { + if (executionPrices.length !== volumes.length) { + throw new Error('Execution prices and volumes must have same length'); + } + + const totalVolume = volumes.reduce((sum, vol) => sum + vol, 0); + const weightedExecutionPrice = executionPrices.reduce((sum, price, i) => + sum + price * volumes[i], 0) / totalVolume; + + const totalCommissions = commissions.reduce((sum, comm) => sum + comm, 0); + const totalMarketImpact = marketImpact.reduce((sum, impact, i) => + sum + impact * volumes[i], 0); + + const delayComponent = weightedExecutionPrice - decisionPrice; + const marketImpactComponent = totalMarketImpact / totalVolume; + const timingComponent = 0; // Simplified - would need benchmark price evolution + const commissionComponent = totalCommissions / totalVolume; + + const totalShortfall = delayComponent + marketImpactComponent + + timingComponent + commissionComponent; + + return { + totalShortfall, + delayComponent, + marketImpactComponent, + timingComponent, + commissionComponent + }; +} + +/** + * Amihud Illiquidity Measure (price impact per unit of volume) + */ +export function amihudIlliquidity( + ohlcv: OHLCVData[], + lookbackPeriod: number = 252 +): number { + if (ohlcv.length < lookbackPeriod) return 0; + + const recentData = ohlcv.slice(-lookbackPeriod); + let illiquiditySum = 0; + let validDays = 0; + + for (const candle of recentData) { + if (candle.volume > 0) { + const dailyReturn = Math.abs((candle.close - candle.open) / candle.open); + const dollarVolume = candle.volume * candle.close; + + if (dollarVolume > 0) { + illiquiditySum += dailyReturn / dollarVolume; + 
validDays++; + } + } + } + + return validDays > 0 ? (illiquiditySum / validDays) * 1000000 : 0; // Scale to millions +} + +/** + * Roll's Spread Estimator (effective spread from serial covariance) + */ +export function rollSpreadEstimator(prices: number[]): number { + if (prices.length < 3) return 0; + + // Calculate price changes + const priceChanges: number[] = []; + for (let i = 1; i < prices.length; i++) { + priceChanges.push(prices[i] - prices[i - 1]); + } + + // Calculate serial covariance + let covariance = 0; + for (let i = 1; i < priceChanges.length; i++) { + covariance += priceChanges[i] * priceChanges[i - 1]; + } + covariance /= (priceChanges.length - 1); + + // Roll's estimator: spread = 2 * sqrt(-covariance) + const spread = covariance < 0 ? 2 * Math.sqrt(-covariance) : 0; + + return spread; +} + +/** + * Kyle's Lambda (price impact coefficient) + */ +export function kyleLambda( + priceChanges: number[], + orderFlow: number[] // Signed order flow (positive for buys, negative for sells) +): number { + if (priceChanges.length !== orderFlow.length || priceChanges.length < 2) return 0; + + // Calculate regression: priceChange = lambda * orderFlow + error + const n = priceChanges.length; + const meanPrice = priceChanges.reduce((sum, p) => sum + p, 0) / n; + const meanFlow = orderFlow.reduce((sum, f) => sum + f, 0) / n; + + let numerator = 0; + let denominator = 0; + + for (let i = 0; i < n; i++) { + const priceDeviation = priceChanges[i] - meanPrice; + const flowDeviation = orderFlow[i] - meanFlow; + + numerator += priceDeviation * flowDeviation; + denominator += flowDeviation * flowDeviation; + } + + return denominator > 0 ? 
numerator / denominator : 0; +} + +/** + * Probability of Informed Trading (PIN) - simplified version + */ +export function probabilityInformedTrading( + buyVolumes: number[], + sellVolumes: number[], + period: number = 20 +): number { + if (buyVolumes.length !== sellVolumes.length || buyVolumes.length < period) return 0; + + const recentBuys = buyVolumes.slice(-period); + const recentSells = sellVolumes.slice(-period); + + let totalImbalance = 0; + let totalVolume = 0; + + for (let i = 0; i < period; i++) { + const imbalance = Math.abs(recentBuys[i] - recentSells[i]); + const volume = recentBuys[i] + recentSells[i]; + + totalImbalance += imbalance; + totalVolume += volume; + } + + // Simplified PIN estimate based on order imbalance + return totalVolume > 0 ? totalImbalance / totalVolume : 0; +} + +/** + * Herfindahl-Hirschman Index for Volume Concentration + */ +export function volumeConcentrationHHI( + exchanges: Array<{ name: string; volume: number }> +): number { + if (exchanges.length === 0) return 0; + + const totalVolume = exchanges.reduce((sum, exchange) => sum + exchange.volume, 0); + + if (totalVolume === 0) return 0; + + let hhi = 0; + for (const exchange of exchanges) { + const marketShare = exchange.volume / totalVolume; + hhi += marketShare * marketShare; + } + + return hhi * 10000; // Scale to 0-10000 range +} +/** + * Volume Profile + */ +export function volumeProfile( + ohlcv: OHLCVData[], + priceLevels: number +): { [price: number]: number } { + const profile: { [price: number]: number } = {}; + + if (ohlcv.length === 0) return profile; + + const minPrice = Math.min(...ohlcv.map(candle => candle.low)); + const maxPrice = Math.max(...ohlcv.map(candle => candle.high)); + const priceRange = maxPrice - minPrice; + const priceIncrement = priceRange / priceLevels; + + for (let i = 0; i < priceLevels; i++) { + const priceLevel = minPrice + i * priceIncrement; + profile[priceLevel] = 0; + } + + for (const candle of ohlcv) { + const typicalPrice = 
(candle.high + candle.low + candle.close) / 3; + const priceLevel = minPrice + Math.floor((typicalPrice - minPrice) / priceIncrement) * priceIncrement; + if (profile[priceLevel] !== undefined) { + profile[priceLevel] += candle.volume; + } + } + + return profile; +} + +/** + * Delta Neutral Hedging Ratio + */ +export function deltaNeutralHedgingRatio( + optionDelta: number +): number { + return -optionDelta; +} + +/** + * Gamma Scalping Range + */ +export function gammaScalpingRange( + gamma: number, + theta: number, + timeIncrement: number +): number { + return Math.sqrt(2 * Math.abs(theta) * timeIncrement / gamma); +} + +/** + * Optimal Order Size (based on market impact) + */ +export function optimalOrderSize( + alpha: number, + lambda: number +): number { + return alpha / (2 * lambda); +} + +/** + * Adverse Selection Component of the Spread + */ +export function adverseSelectionComponent( + probabilityOfInformedTrader: number, + spread: number +): number { + return probabilityOfInformedTrader * spread; +} + +/** + * Inventory Risk Component of the Spread + */ +export function inventoryRiskComponent( + inventoryHoldingCost: number, + orderArrivalRate: number +): number { + return inventoryHoldingCost * Math.sqrt(orderArrivalRate); +} + +/** + * Quote Age + */ +export function quoteAge( + lastUpdate: Date +): number { + return Date.now() - lastUpdate.getTime(); +} + +/** + * Trade Classification (Lee-Ready algorithm) + */ +export function tradeClassification( + tradePrice: number, + bidPrice: number, + askPrice: number, + previousTradePrice: number +): 'buy' | 'sell' | 'unknown' { + if (tradePrice > askPrice) { + return 'buy'; + } else if (tradePrice < bidPrice) { + return 'sell'; + } else if (tradePrice >= previousTradePrice) { + return 'buy'; + } else { + return 'sell'; + } +} + +/** + * Tick Rule + */ +export function tickRule( + tradePrice: number, + previousTradePrice: number +): 'buy' | 'sell' | 'unknown' { + if (tradePrice > previousTradePrice) { + return 
'buy'; + } else if (tradePrice < previousTradePrice) { + return 'sell'; + } else { + return 'unknown'; + } +} + +/** + * Amihud's Lambda Variation with High-Frequency Data + */ +export function amihudIlliquidityHFT( + priceChanges: number[], + dollarVolumes: number[], + timeDeltas: number[] +): number { + let illiquiditySum = 0; + let validTrades = 0; + + for (let i = 0; i < priceChanges.length; i++) { + if (dollarVolumes[i] > 0 && timeDeltas[i] > 0) { + illiquiditySum += Math.abs(priceChanges[i]) / (dollarVolumes[i] * timeDeltas[i]); + validTrades++; + } + } + + return validTrades > 0 ? illiquiditySum / validTrades : 0; +} + +/** + * Garman-Klass Volatility + */ +export function garmanKlassVolatility( + openPrices: number[], + highPrices: number[], + lowPrices: number[], + closePrices: number[] +): number { + if (openPrices.length !== highPrices.length || openPrices.length !== lowPrices.length || openPrices.length !== closePrices.length || openPrices.length < 2) return 0; + + let sumSquaredTerm1 = 0; + let sumSquaredTerm2 = 0; + let sumSquaredTerm3 = 0; + + for (let i = 0; i < openPrices.length; i++) { + const logHO = Math.log(highPrices[i] / openPrices[i]); + const logLO = Math.log(lowPrices[i] / openPrices[i]); + const logCO = Math.log(closePrices[i] / openPrices[i]); + + sumSquaredTerm1 += 0.5 * (logHO * logHO + logLO * logLO); + sumSquaredTerm2 += - (2 * Math.log(2) - 1) * (logCO * logCO); + } + + const garmanKlassVariance = (1 / openPrices.length) * (sumSquaredTerm1 + sumSquaredTerm2); + return Math.sqrt(garmanKlassVariance); +} + +/** + * Yang-Zhang Volatility + */ +export function yangZhangVolatility( + openPrices: number[], + highPrices: number[], + lowPrices: number[], + closePrices: number[], + previousClosePrices: number[] +): number { + if (openPrices.length !== highPrices.length || openPrices.length !== lowPrices.length || openPrices.length !== closePrices.length || openPrices.length !== previousClosePrices.length || openPrices.length < 2) return 0; + 
+ const k = 0.34 / (1.34 + (openPrices.length + 1) / (previousClosePrices.length - 1)); + + let sumSquaredTerm1 = 0; + let sumSquaredTerm2 = 0; + let sumSquaredTerm3 = 0; + + for (let i = 0; i < openPrices.length; i++) { + const overnightReturn = Math.log(openPrices[i] / previousClosePrices[i]); + const openToHigh = Math.log(highPrices[i] / openPrices[i]); + const openToLow = Math.log(lowPrices[i] / openPrices[i]); + const closeToOpen = Math.log(closePrices[i] / openPrices[i]); + + sumSquaredTerm1 += overnightReturn * overnightReturn; + sumSquaredTerm2 += openToHigh * openToHigh; + sumSquaredTerm3 += openToLow * openToLow; + } + + const variance = sumSquaredTerm1 + k * sumSquaredTerm2 + (1 - k) * sumSquaredTerm3; + return Math.sqrt(variance); +} + +/** + * Volume Order Imbalance (VOI) + */ +export function volumeOrderImbalance( + buyVolumes: number[], + sellVolumes: number[] +): number[] { + if (buyVolumes.length !== sellVolumes.length) return []; + + const voi: number[] = []; + for (let i = 0; i < buyVolumes.length; i++) { + voi.push(buyVolumes[i] - sellVolumes[i]); + } + return voi; +} + +/** + * Cumulative Volume Delta (CVD) + */ +export function cumulativeVolumeDelta( + buyVolumes: number[], + sellVolumes: number[] +): number[] { + if (buyVolumes.length !== sellVolumes.length) return []; + + const cvd: number[] = []; + let cumulativeDelta = 0; + for (let i = 0; i < buyVolumes.length; i++) { + cumulativeDelta += buyVolumes[i] - sellVolumes[i]; + cvd.push(cumulativeDelta); + } + return cvd; +} + +/** + * Market Order Ratio + */ +export function marketOrderRatio( + marketOrders: number[], + limitOrders: number[] +): number[] { + if (marketOrders.length !== limitOrders.length) return []; + + const ratios: number[] = []; + for (let i = 0; i < marketOrders.length; i++) { + const totalOrders = marketOrders[i] + limitOrders[i]; + ratios.push(totalOrders > 0 ? 
marketOrders[i] / totalOrders : 0); + } + return ratios; +} + +/** + * Helper function to calculate the average of an array of numbers + */ + +function average(arr: number[]): number { + if (arr.length === 0) return 0; + return arr.reduce((a, b) => a + b, 0) / arr.length; +} + +function calculateVolatility(returns: number[]): number { + if (returns.length < 2) return 0; + + const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1); + + return Math.sqrt(variance); +} + +function calculateCorrelation(x: number[], y: number[]): number { + if (x.length !== y.length || x.length < 2) return 0; + + const n = x.length; + const meanX = x.reduce((sum, val) => sum + val, 0) / n; + const meanY = y.reduce((sum, val) => sum + val, 0) / n; + + let numerator = 0; + let sumXSquared = 0; + let sumYSquared = 0; + + for (let i = 0; i < n; i++) { + const xDiff = x[i] - meanX; + const yDiff = y[i] - meanY; + + numerator += xDiff * yDiff; + sumXSquared += xDiff * xDiff; + sumYSquared += yDiff * yDiff; + } + + const denominator = Math.sqrt(sumXSquared * sumYSquared); + + return denominator > 0 ? 
numerator / denominator : 0; +} + +function calculateVariance(values: number[]): number { + if (values.length < 2) return 0; + + const mean = values.reduce((sum, val) => sum + val, 0) / values.length; + return values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / (values.length - 1); +} + +function calculateCovariance(x: number[], y: number[]): number { + if (x.length !== y.length || x.length < 2) return 0; + + const n = x.length; + const meanX = x.reduce((sum, val) => sum + val, 0) / n; + const meanY = y.reduce((sum, val) => sum + val, 0) / n; + + return x.reduce((sum, val, i) => sum + (val - meanX) * (y[i] - meanY), 0) / (n - 1); +} diff --git a/libs/utils/src/calculations/options-pricing.ts b/libs/utils/src/calculations/options-pricing.ts index 210ae38..68147c1 100644 --- a/libs/utils/src/calculations/options-pricing.ts +++ b/libs/utils/src/calculations/options-pricing.ts @@ -1,718 +1,718 @@ -/** - * Options Pricing Models - * Implementation of various options pricing models and Greeks calculations - */ - -export interface OptionParameters { - spotPrice: number; - strikePrice: number; - timeToExpiry: number; // in years - riskFreeRate: number; - volatility: number; - dividendYield?: number; -} - -export interface OptionPricing { - callPrice: number; - putPrice: number; - intrinsicValueCall: number; - intrinsicValuePut: number; - timeValueCall: number; - timeValuePut: number; -} - -export interface GreeksCalculation { - delta: number; - gamma: number; - theta: number; - vega: number; - rho: number; -} - -export interface ImpliedVolatilityResult { - impliedVolatility: number; - iterations: number; - converged: boolean; -} - -/** - * Black-Scholes option pricing model - */ -export function blackScholes(params: OptionParameters): OptionPricing { - const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params; - - if (timeToExpiry <= 0) { - const intrinsicValueCall = Math.max(spotPrice - strikePrice, 0); - const 
intrinsicValuePut = Math.max(strikePrice - spotPrice, 0); - - return { - callPrice: intrinsicValueCall, - putPrice: intrinsicValuePut, - intrinsicValueCall, - intrinsicValuePut, - timeValueCall: 0, - timeValuePut: 0 - }; - } - - const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate - dividendYield + 0.5 * volatility * volatility) * timeToExpiry) / - (volatility * Math.sqrt(timeToExpiry)); - const d2 = d1 - volatility * Math.sqrt(timeToExpiry); - - const nd1 = normalCDF(d1); - const nd2 = normalCDF(d2); - const nMinusd1 = normalCDF(-d1); - const nMinusd2 = normalCDF(-d2); - - const callPrice = spotPrice * Math.exp(-dividendYield * timeToExpiry) * nd1 - - strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * nd2; - - const putPrice = strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * nMinusd2 - - spotPrice * Math.exp(-dividendYield * timeToExpiry) * nMinusd1; - - const intrinsicValueCall = Math.max(spotPrice - strikePrice, 0); - const intrinsicValuePut = Math.max(strikePrice - spotPrice, 0); - - const timeValueCall = callPrice - intrinsicValueCall; - const timeValuePut = putPrice - intrinsicValuePut; - - return { - callPrice, - putPrice, - intrinsicValueCall, - intrinsicValuePut, - timeValueCall, - timeValuePut - }; -} - -export function impliedVolatility( - price: number, S: number, K: number, T: number, r: number, isCall = true -): number { - // …Newton–Raphson on Οƒ to match blackScholesPrice - let sigma = 0.2; // Initial guess for volatility - const tolerance = 1e-6; - const maxIterations = 100; - let iteration = 0; - let priceDiff = 1; // Initialize to a non-zero value - while (Math.abs(priceDiff) > tolerance && iteration < maxIterations) { - const params: OptionParameters = { - spotPrice: S, - strikePrice: K, - timeToExpiry: T, - riskFreeRate: r, - volatility: sigma - }; - - const calculatedPrice = isCall ? 
blackScholes(params).callPrice : blackScholes(params).putPrice; - priceDiff = calculatedPrice - price; - - // Calculate Vega - const greeks = calculateGreeks(params, isCall ? 'call' : 'put'); - const vega = greeks.vega * 100; // Convert from percentage to absolute - - if (vega === 0) { - break; // Avoid division by zero - } - - sigma -= priceDiff / vega; // Update volatility estimate - iteration++; - } - if (iteration === maxIterations) { - console.warn('Implied volatility calculation did not converge'); - } - - if (sigma < 0) { - console.warn('Calculated implied volatility is negative, returning 0'); - return 0; - } - - if (sigma > 10) { - console.warn('Calculated implied volatility is too high, returning 10'); - return 10; // Cap at a reasonable maximum - } - if (isNaN(sigma)) { - console.warn('Calculated implied volatility is NaN, returning 0'); - return 0; - } - return sigma -} - -/** - * Calculate option Greeks using Black-Scholes model - */ -export function calculateGreeks(params: OptionParameters, optionType: 'call' | 'put' = 'call'): GreeksCalculation { - const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params; - - if (timeToExpiry <= 0) { - return { - delta: optionType === 'call' ? (spotPrice > strikePrice ? 1 : 0) : (spotPrice < strikePrice ? -1 : 0), - gamma: 0, - theta: 0, - vega: 0, - rho: 0 - }; - } - - const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate - dividendYield + 0.5 * volatility * volatility) * timeToExpiry) / - (volatility * Math.sqrt(timeToExpiry)); - const d2 = d1 - volatility * Math.sqrt(timeToExpiry); - - const nd1 = normalCDF(d1); - const nd2 = normalCDF(d2); - const npd1 = normalPDF(d1); - - // Delta - const callDelta = Math.exp(-dividendYield * timeToExpiry) * nd1; - const putDelta = Math.exp(-dividendYield * timeToExpiry) * (nd1 - 1); - const delta = optionType === 'call' ? 
callDelta : putDelta; - - // Gamma (same for calls and puts) - const gamma = Math.exp(-dividendYield * timeToExpiry) * npd1 / - (spotPrice * volatility * Math.sqrt(timeToExpiry)); - - // Theta - const term1 = -(spotPrice * npd1 * volatility * Math.exp(-dividendYield * timeToExpiry)) / - (2 * Math.sqrt(timeToExpiry)); - const term2Call = riskFreeRate * strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * nd2; - const term2Put = -riskFreeRate * strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * normalCDF(-d2); - const term3 = dividendYield * spotPrice * Math.exp(-dividendYield * timeToExpiry) * - (optionType === 'call' ? nd1 : normalCDF(-d1)); - - const theta = optionType === 'call' ? - (term1 - term2Call + term3) / 365 : - (term1 + term2Put + term3) / 365; - - // Vega (same for calls and puts) - const vega = spotPrice * Math.exp(-dividendYield * timeToExpiry) * npd1 * Math.sqrt(timeToExpiry) / 100; - - // Rho - const callRho = strikePrice * timeToExpiry * Math.exp(-riskFreeRate * timeToExpiry) * nd2 / 100; - const putRho = -strikePrice * timeToExpiry * Math.exp(-riskFreeRate * timeToExpiry) * normalCDF(-d2) / 100; - const rho = optionType === 'call' ? 
callRho : putRho; - - return { - delta, - gamma, - theta, - vega, - rho - }; -} - -/** - * Calculate implied volatility using Newton-Raphson method - */ -export function calculateImpliedVolatility( - marketPrice: number, - spotPrice: number, - strikePrice: number, - timeToExpiry: number, - riskFreeRate: number, - optionType: 'call' | 'put' = 'call', - dividendYield: number = 0, - initialGuess: number = 0.2, - tolerance: number = 1e-6, - maxIterations: number = 100 -): ImpliedVolatilityResult { - let volatility = initialGuess; - let iterations = 0; - let converged = false; - - for (let i = 0; i < maxIterations; i++) { - iterations = i + 1; - - const params: OptionParameters = { - spotPrice, - strikePrice, - timeToExpiry, - riskFreeRate, - volatility, - dividendYield - }; - - const pricing = blackScholes(params); - const theoreticalPrice = optionType === 'call' ? pricing.callPrice : pricing.putPrice; - - const priceDiff = theoreticalPrice - marketPrice; - - if (Math.abs(priceDiff) < tolerance) { - converged = true; - break; - } - - // Calculate vega for Newton-Raphson - const greeks = calculateGreeks(params, optionType); - const vega = greeks.vega * 100; // Convert back from percentage - - if (Math.abs(vega) < 1e-10) { - break; // Avoid division by zero - } - - volatility = volatility - priceDiff / vega; - - // Keep volatility within reasonable bounds - volatility = Math.max(0.001, Math.min(volatility, 10)); - } - - return { - impliedVolatility: volatility, - iterations, - converged - }; -} - -/** - * Binomial option pricing model - */ -export function binomialOptionPricing( - params: OptionParameters, - optionType: 'call' | 'put' = 'call', - americanStyle: boolean = false, - steps: number = 100 -): OptionPricing { - const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params; - - const dt = timeToExpiry / steps; - const u = Math.exp(volatility * Math.sqrt(dt)); - const d = 1 / u; - const p = (Math.exp((riskFreeRate - 
dividendYield) * dt) - d) / (u - d); - const discount = Math.exp(-riskFreeRate * dt); - - // Create price tree - const stockPrices: number[][] = []; - for (let i = 0; i <= steps; i++) { - stockPrices[i] = []; - for (let j = 0; j <= i; j++) { - stockPrices[i][j] = spotPrice * Math.pow(u, i - j) * Math.pow(d, j); - } - } - - // Calculate option values at expiration - const optionValues: number[][] = []; - for (let i = 0; i <= steps; i++) { - optionValues[i] = []; - } - - for (let j = 0; j <= steps; j++) { - if (optionType === 'call') { - optionValues[steps][j] = Math.max(stockPrices[steps][j] - strikePrice, 0); - } else { - optionValues[steps][j] = Math.max(strikePrice - stockPrices[steps][j], 0); - } - } - - // Work backwards through the tree - for (let i = steps - 1; i >= 0; i--) { - for (let j = 0; j <= i; j++) { - // European option value - const holdValue = discount * (p * optionValues[i + 1][j] + (1 - p) * optionValues[i + 1][j + 1]); - - if (americanStyle) { - // American option - can exercise early - const exerciseValue = optionType === 'call' ? - Math.max(stockPrices[i][j] - strikePrice, 0) : - Math.max(strikePrice - stockPrices[i][j], 0); - - optionValues[i][j] = Math.max(holdValue, exerciseValue); - } else { - optionValues[i][j] = holdValue; - } - } - } - - const price = optionValues[0][0]; - const intrinsicValue = optionType === 'call' ? 
- Math.max(spotPrice - strikePrice, 0) : - Math.max(strikePrice - spotPrice, 0); - const timeValue = price - intrinsicValue; - - if (optionType === 'call') { - return { - callPrice: price, - putPrice: 0, // Not calculated - intrinsicValueCall: intrinsicValue, - intrinsicValuePut: 0, - timeValueCall: timeValue, - timeValuePut: 0 - }; - } else { - return { - callPrice: 0, // Not calculated - putPrice: price, - intrinsicValueCall: 0, - intrinsicValuePut: intrinsicValue, - timeValueCall: 0, - timeValuePut: timeValue - }; - } -} - -/** - * Monte Carlo option pricing - */ -export function monteCarloOptionPricing( - params: OptionParameters, - optionType: 'call' | 'put' = 'call', - numSimulations: number = 100000 -): OptionPricing { - const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params; - - let totalPayoff = 0; - - for (let i = 0; i < numSimulations; i++) { - // Generate random price path - const z = boxMullerTransform(); - const finalPrice = spotPrice * Math.exp( - (riskFreeRate - dividendYield - 0.5 * volatility * volatility) * timeToExpiry + - volatility * Math.sqrt(timeToExpiry) * z - ); - - // Calculate payoff - const payoff = optionType === 'call' ? - Math.max(finalPrice - strikePrice, 0) : - Math.max(strikePrice - finalPrice, 0); - - totalPayoff += payoff; - } - - const averagePayoff = totalPayoff / numSimulations; - const price = averagePayoff * Math.exp(-riskFreeRate * timeToExpiry); - - const intrinsicValue = optionType === 'call' ? 
- Math.max(spotPrice - strikePrice, 0) : - Math.max(strikePrice - spotPrice, 0); - const timeValue = price - intrinsicValue; - - if (optionType === 'call') { - return { - callPrice: price, - putPrice: 0, - intrinsicValueCall: intrinsicValue, - intrinsicValuePut: 0, - timeValueCall: timeValue, - timeValuePut: 0 - }; - } else { - return { - callPrice: 0, - putPrice: price, - intrinsicValueCall: 0, - intrinsicValuePut: intrinsicValue, - timeValueCall: 0, - timeValuePut: timeValue - }; - } -} - -/** - * Calculate option portfolio risk metrics - */ -export function calculateOptionPortfolioRisk( - positions: Array<{ - optionType: 'call' | 'put'; - quantity: number; - params: OptionParameters; - }> -): { - totalDelta: number; - totalGamma: number; - totalTheta: number; - totalVega: number; - totalRho: number; - portfolioValue: number; -} { - let totalDelta = 0; - let totalGamma = 0; - let totalTheta = 0; - let totalVega = 0; - let totalRho = 0; - let portfolioValue = 0; - - for (const position of positions) { - const greeks = calculateGreeks(position.params, position.optionType); - const pricing = blackScholes(position.params); - const optionPrice = position.optionType === 'call' ? 
pricing.callPrice : pricing.putPrice; - - totalDelta += greeks.delta * position.quantity; - totalGamma += greeks.gamma * position.quantity; - totalTheta += greeks.theta * position.quantity; - totalVega += greeks.vega * position.quantity; - totalRho += greeks.rho * position.quantity; - portfolioValue += optionPrice * position.quantity; - } - - return { - totalDelta, - totalGamma, - totalTheta, - totalVega, - totalRho, - portfolioValue - }; -} - -/** - * Volatility surface interpolation - */ -export function interpolateVolatilitySurface( - strikes: number[], - expiries: number[], - volatilities: number[][], - targetStrike: number, - targetExpiry: number -): number { - // Simplified bilinear interpolation - // In production, use more sophisticated interpolation methods - - // Find surrounding points - let strikeIndex = 0; - let expiryIndex = 0; - - for (let i = 0; i < strikes.length - 1; i++) { - if (targetStrike >= strikes[i] && targetStrike <= strikes[i + 1]) { - strikeIndex = i; - break; - } - } - - for (let i = 0; i < expiries.length - 1; i++) { - if (targetExpiry >= expiries[i] && targetExpiry <= expiries[i + 1]) { - expiryIndex = i; - break; - } - } - - // Bilinear interpolation - const x1 = strikes[strikeIndex]; - const x2 = strikes[strikeIndex + 1]; - const y1 = expiries[expiryIndex]; - const y2 = expiries[expiryIndex + 1]; - - const q11 = volatilities[expiryIndex][strikeIndex]; - const q12 = volatilities[expiryIndex + 1][strikeIndex]; - const q21 = volatilities[expiryIndex][strikeIndex + 1]; - const q22 = volatilities[expiryIndex + 1][strikeIndex + 1]; - - const wx = (targetStrike - x1) / (x2 - x1); - const wy = (targetExpiry - y1) / (y2 - y1); - - return q11 * (1 - wx) * (1 - wy) + - q21 * wx * (1 - wy) + - q12 * (1 - wx) * wy + - q22 * wx * wy; -} - -// Helper functions - -/** - * Normal cumulative distribution function - */ -function normalCDF(x: number): number { - return 0.5 * (1 + erf(x / Math.sqrt(2))); -} - -/** - * Normal probability density function 
- */ -function normalPDF(x: number): number { - return Math.exp(-0.5 * x * x) / Math.sqrt(2 * Math.PI); -} - -/** - * Error function approximation - */ -function erf(x: number): number { - // Abramowitz and Stegun approximation - const a1 = 0.254829592; - const a2 = -0.284496736; - const a3 = 1.421413741; - const a4 = -1.453152027; - const a5 = 1.061405429; - const p = 0.3275911; - - const sign = x >= 0 ? 1 : -1; - x = Math.abs(x); - - const t = 1.0 / (1.0 + p * x); - const y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.exp(-x * x); - - return sign * y; -} - -/** - * Box-Muller transformation for normal random numbers - */ -function boxMullerTransform(): number { - let u1 = Math.random(); - let u2 = Math.random(); - - // Ensure u1 is not zero - while (u1 === 0) { - u1 = Math.random(); - } - - return Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2); -} - -/** - * Prices a straddle option strategy - */ -export function straddle(params: OptionParameters): { callPrice: number; putPrice: number; strategyCost: number } { - const callOption = blackScholes(params); - const putOption = blackScholes(params); - const strategyCost = callOption.callPrice + putOption.putPrice; - - return { - callPrice: callOption.callPrice, - putPrice: putOption.putPrice, - strategyCost: strategyCost - }; -} - -/** - * Prices a strangle option strategy - */ -export function strangle(callParams: OptionParameters, putParams: OptionParameters): { callPrice: number; putPrice: number; strategyCost: number } { - const callOption = blackScholes(callParams); - const putOption = blackScholes(putParams); - const strategyCost = callOption.callPrice + putOption.putPrice; - - return { - callPrice: callOption.callPrice, - putPrice: putOption.putPrice, - strategyCost: strategyCost - }; -} - -/** - * Prices a butterfly option strategy - */ -export function butterfly( - lowerStrikeParams: OptionParameters, - middleStrikeParams: OptionParameters, - upperStrikeParams: 
OptionParameters -): { - lowerCallPrice: number; - middleCallPrice: number; - upperCallPrice: number; - strategyCost: number; -} { - const lowerCall = blackScholes(lowerStrikeParams); - const middleCall = blackScholes(middleStrikeParams); - const upperCall = blackScholes(upperStrikeParams); - - const strategyCost = lowerCall.callPrice - 2 * middleCall.callPrice + upperCall.callPrice; - - return { - lowerCallPrice: lowerCall.callPrice, - middleCallPrice: middleCall.callPrice, - upperCallPrice: upperCall.callPrice, - strategyCost: strategyCost - }; -} - -/** - * Prices a condor option strategy - */ -export function condor( - lowerStrikeParams: OptionParameters, - middleLowerStrikeParams: OptionParameters, - middleUpperStrikeParams: OptionParameters, - upperStrikeParams: OptionParameters -): { - lowerCallPrice: number; - middleLowerCallPrice: number; - middleUpperCallPrice: number; - upperCallPrice: number; - strategyCost: number; -} { - const lowerCall = blackScholes(lowerStrikeParams); - const middleLowerCall = blackScholes(middleLowerStrikeParams); - const middleUpperCall = blackScholes(middleUpperStrikeParams); - const upperCall = blackScholes(upperStrikeParams); - - const strategyCost = lowerCall.callPrice - middleLowerCall.callPrice - middleUpperCall.callPrice + upperCall.callPrice; - - return { - lowerCallPrice: lowerCall.callPrice, - middleLowerCallPrice: middleLowerCall.callPrice, - middleUpperCallPrice: middleUpperCall.callPrice, - upperCallPrice: upperCall.callPrice, - strategyCost: strategyCost - }; -} - -/** - * Calculates combined Greeks for an option strategy - */ -export function calculateStrategyGreeks( - positions: Array<{ - optionType: 'call' | 'put'; - quantity: number; - params: OptionParameters; - }> -): GreeksCalculation { - let totalDelta = 0; - let totalGamma = 0; - let totalTheta = 0; - let totalVega = 0; - let totalRho = 0; - - for (const position of positions) { - const greeks = calculateGreeks(position.params, position.optionType); - - 
totalDelta += greeks.delta * position.quantity; - totalGamma += greeks.gamma * position.quantity; - totalTheta += greeks.theta * position.quantity; - totalVega += greeks.vega * position.quantity; - totalRho += greeks.rho * position.quantity; - } - - return { - delta: totalDelta, - gamma: totalGamma, - theta: totalTheta, - vega: totalVega, - rho: totalRho - }; -} - -/** - * Black-Scholes option pricing model with greeks - */ -export function blackScholesWithGreeks(params: OptionParameters, optionType: 'call' | 'put' = 'call'): { pricing: OptionPricing; greeks: GreeksCalculation } { - const pricing = blackScholes(params); - const greeks = calculateGreeks(params, optionType); - return { pricing, greeks }; -} - -/** - * Calculates the breakeven point for a call option at expiration - */ -export function callBreakeven(strikePrice: number, callPrice: number): number { - return strikePrice + callPrice; -} - -/** - * Calculates the breakeven point for a put option at expiration - */ -export function putBreakeven(strikePrice: number, putPrice: number): number { - return strikePrice - putPrice; -} - -/** - * Estimates the probability of profit for a call option at expiration - */ -export function callProbabilityOfProfit(spotPrice: number, strikePrice: number, timeToExpiry: number, riskFreeRate: number, volatility: number): number { - const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate + 0.5 * volatility * volatility) * timeToExpiry) / (volatility * Math.sqrt(timeToExpiry)); - return normalCDF(d1); -} - -/** - * Estimates the probability of profit for a put option at expiration - */ -export function putProbabilityOfProfit(spotPrice: number, strikePrice: number, timeToExpiry: number, riskFreeRate: number, volatility: number): number { - const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate + 0.5 * volatility * volatility) * timeToExpiry) / (volatility * Math.sqrt(timeToExpiry)); - return 1 - normalCDF(d1); +/** + * Options Pricing Models + * Implementation of 
various options pricing models and Greeks calculations + */ + +export interface OptionParameters { + spotPrice: number; + strikePrice: number; + timeToExpiry: number; // in years + riskFreeRate: number; + volatility: number; + dividendYield?: number; +} + +export interface OptionPricing { + callPrice: number; + putPrice: number; + intrinsicValueCall: number; + intrinsicValuePut: number; + timeValueCall: number; + timeValuePut: number; +} + +export interface GreeksCalculation { + delta: number; + gamma: number; + theta: number; + vega: number; + rho: number; +} + +export interface ImpliedVolatilityResult { + impliedVolatility: number; + iterations: number; + converged: boolean; +} + +/** + * Black-Scholes option pricing model + */ +export function blackScholes(params: OptionParameters): OptionPricing { + const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params; + + if (timeToExpiry <= 0) { + const intrinsicValueCall = Math.max(spotPrice - strikePrice, 0); + const intrinsicValuePut = Math.max(strikePrice - spotPrice, 0); + + return { + callPrice: intrinsicValueCall, + putPrice: intrinsicValuePut, + intrinsicValueCall, + intrinsicValuePut, + timeValueCall: 0, + timeValuePut: 0 + }; + } + + const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate - dividendYield + 0.5 * volatility * volatility) * timeToExpiry) / + (volatility * Math.sqrt(timeToExpiry)); + const d2 = d1 - volatility * Math.sqrt(timeToExpiry); + + const nd1 = normalCDF(d1); + const nd2 = normalCDF(d2); + const nMinusd1 = normalCDF(-d1); + const nMinusd2 = normalCDF(-d2); + + const callPrice = spotPrice * Math.exp(-dividendYield * timeToExpiry) * nd1 - + strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * nd2; + + const putPrice = strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * nMinusd2 - + spotPrice * Math.exp(-dividendYield * timeToExpiry) * nMinusd1; + + const intrinsicValueCall = Math.max(spotPrice - strikePrice, 0); + const intrinsicValuePut 
= Math.max(strikePrice - spotPrice, 0); + + const timeValueCall = callPrice - intrinsicValueCall; + const timeValuePut = putPrice - intrinsicValuePut; + + return { + callPrice, + putPrice, + intrinsicValueCall, + intrinsicValuePut, + timeValueCall, + timeValuePut + }; +} + +export function impliedVolatility( + price: number, S: number, K: number, T: number, r: number, isCall = true +): number { + // …Newton–Raphson on Οƒ to match blackScholesPrice + let sigma = 0.2; // Initial guess for volatility + const tolerance = 1e-6; + const maxIterations = 100; + let iteration = 0; + let priceDiff = 1; // Initialize to a non-zero value + while (Math.abs(priceDiff) > tolerance && iteration < maxIterations) { + const params: OptionParameters = { + spotPrice: S, + strikePrice: K, + timeToExpiry: T, + riskFreeRate: r, + volatility: sigma + }; + + const calculatedPrice = isCall ? blackScholes(params).callPrice : blackScholes(params).putPrice; + priceDiff = calculatedPrice - price; + + // Calculate Vega + const greeks = calculateGreeks(params, isCall ? 
'call' : 'put'); + const vega = greeks.vega * 100; // Convert from percentage to absolute + + if (vega === 0) { + break; // Avoid division by zero + } + + sigma -= priceDiff / vega; // Update volatility estimate + iteration++; + } + if (iteration === maxIterations) { + console.warn('Implied volatility calculation did not converge'); + } + + if (sigma < 0) { + console.warn('Calculated implied volatility is negative, returning 0'); + return 0; + } + + if (sigma > 10) { + console.warn('Calculated implied volatility is too high, returning 10'); + return 10; // Cap at a reasonable maximum + } + if (isNaN(sigma)) { + console.warn('Calculated implied volatility is NaN, returning 0'); + return 0; + } + return sigma +} + +/** + * Calculate option Greeks using Black-Scholes model + */ +export function calculateGreeks(params: OptionParameters, optionType: 'call' | 'put' = 'call'): GreeksCalculation { + const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params; + + if (timeToExpiry <= 0) { + return { + delta: optionType === 'call' ? (spotPrice > strikePrice ? 1 : 0) : (spotPrice < strikePrice ? -1 : 0), + gamma: 0, + theta: 0, + vega: 0, + rho: 0 + }; + } + + const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate - dividendYield + 0.5 * volatility * volatility) * timeToExpiry) / + (volatility * Math.sqrt(timeToExpiry)); + const d2 = d1 - volatility * Math.sqrt(timeToExpiry); + + const nd1 = normalCDF(d1); + const nd2 = normalCDF(d2); + const npd1 = normalPDF(d1); + + // Delta + const callDelta = Math.exp(-dividendYield * timeToExpiry) * nd1; + const putDelta = Math.exp(-dividendYield * timeToExpiry) * (nd1 - 1); + const delta = optionType === 'call' ? 
callDelta : putDelta; + + // Gamma (same for calls and puts) + const gamma = Math.exp(-dividendYield * timeToExpiry) * npd1 / + (spotPrice * volatility * Math.sqrt(timeToExpiry)); + + // Theta + const term1 = -(spotPrice * npd1 * volatility * Math.exp(-dividendYield * timeToExpiry)) / + (2 * Math.sqrt(timeToExpiry)); + const term2Call = riskFreeRate * strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * nd2; + const term2Put = -riskFreeRate * strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * normalCDF(-d2); + const term3 = dividendYield * spotPrice * Math.exp(-dividendYield * timeToExpiry) * + (optionType === 'call' ? nd1 : normalCDF(-d1)); + + const theta = optionType === 'call' ? + (term1 - term2Call + term3) / 365 : + (term1 + term2Put + term3) / 365; + + // Vega (same for calls and puts) + const vega = spotPrice * Math.exp(-dividendYield * timeToExpiry) * npd1 * Math.sqrt(timeToExpiry) / 100; + + // Rho + const callRho = strikePrice * timeToExpiry * Math.exp(-riskFreeRate * timeToExpiry) * nd2 / 100; + const putRho = -strikePrice * timeToExpiry * Math.exp(-riskFreeRate * timeToExpiry) * normalCDF(-d2) / 100; + const rho = optionType === 'call' ? 
callRho : putRho; + + return { + delta, + gamma, + theta, + vega, + rho + }; +} + +/** + * Calculate implied volatility using Newton-Raphson method + */ +export function calculateImpliedVolatility( + marketPrice: number, + spotPrice: number, + strikePrice: number, + timeToExpiry: number, + riskFreeRate: number, + optionType: 'call' | 'put' = 'call', + dividendYield: number = 0, + initialGuess: number = 0.2, + tolerance: number = 1e-6, + maxIterations: number = 100 +): ImpliedVolatilityResult { + let volatility = initialGuess; + let iterations = 0; + let converged = false; + + for (let i = 0; i < maxIterations; i++) { + iterations = i + 1; + + const params: OptionParameters = { + spotPrice, + strikePrice, + timeToExpiry, + riskFreeRate, + volatility, + dividendYield + }; + + const pricing = blackScholes(params); + const theoreticalPrice = optionType === 'call' ? pricing.callPrice : pricing.putPrice; + + const priceDiff = theoreticalPrice - marketPrice; + + if (Math.abs(priceDiff) < tolerance) { + converged = true; + break; + } + + // Calculate vega for Newton-Raphson + const greeks = calculateGreeks(params, optionType); + const vega = greeks.vega * 100; // Convert back from percentage + + if (Math.abs(vega) < 1e-10) { + break; // Avoid division by zero + } + + volatility = volatility - priceDiff / vega; + + // Keep volatility within reasonable bounds + volatility = Math.max(0.001, Math.min(volatility, 10)); + } + + return { + impliedVolatility: volatility, + iterations, + converged + }; +} + +/** + * Binomial option pricing model + */ +export function binomialOptionPricing( + params: OptionParameters, + optionType: 'call' | 'put' = 'call', + americanStyle: boolean = false, + steps: number = 100 +): OptionPricing { + const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params; + + const dt = timeToExpiry / steps; + const u = Math.exp(volatility * Math.sqrt(dt)); + const d = 1 / u; + const p = (Math.exp((riskFreeRate - 
dividendYield) * dt) - d) / (u - d); + const discount = Math.exp(-riskFreeRate * dt); + + // Create price tree + const stockPrices: number[][] = []; + for (let i = 0; i <= steps; i++) { + stockPrices[i] = []; + for (let j = 0; j <= i; j++) { + stockPrices[i][j] = spotPrice * Math.pow(u, i - j) * Math.pow(d, j); + } + } + + // Calculate option values at expiration + const optionValues: number[][] = []; + for (let i = 0; i <= steps; i++) { + optionValues[i] = []; + } + + for (let j = 0; j <= steps; j++) { + if (optionType === 'call') { + optionValues[steps][j] = Math.max(stockPrices[steps][j] - strikePrice, 0); + } else { + optionValues[steps][j] = Math.max(strikePrice - stockPrices[steps][j], 0); + } + } + + // Work backwards through the tree + for (let i = steps - 1; i >= 0; i--) { + for (let j = 0; j <= i; j++) { + // European option value + const holdValue = discount * (p * optionValues[i + 1][j] + (1 - p) * optionValues[i + 1][j + 1]); + + if (americanStyle) { + // American option - can exercise early + const exerciseValue = optionType === 'call' ? + Math.max(stockPrices[i][j] - strikePrice, 0) : + Math.max(strikePrice - stockPrices[i][j], 0); + + optionValues[i][j] = Math.max(holdValue, exerciseValue); + } else { + optionValues[i][j] = holdValue; + } + } + } + + const price = optionValues[0][0]; + const intrinsicValue = optionType === 'call' ? 
+ Math.max(spotPrice - strikePrice, 0) : + Math.max(strikePrice - spotPrice, 0); + const timeValue = price - intrinsicValue; + + if (optionType === 'call') { + return { + callPrice: price, + putPrice: 0, // Not calculated + intrinsicValueCall: intrinsicValue, + intrinsicValuePut: 0, + timeValueCall: timeValue, + timeValuePut: 0 + }; + } else { + return { + callPrice: 0, // Not calculated + putPrice: price, + intrinsicValueCall: 0, + intrinsicValuePut: intrinsicValue, + timeValueCall: 0, + timeValuePut: timeValue + }; + } +} + +/** + * Monte Carlo option pricing + */ +export function monteCarloOptionPricing( + params: OptionParameters, + optionType: 'call' | 'put' = 'call', + numSimulations: number = 100000 +): OptionPricing { + const { spotPrice, strikePrice, timeToExpiry, riskFreeRate, volatility, dividendYield = 0 } = params; + + let totalPayoff = 0; + + for (let i = 0; i < numSimulations; i++) { + // Generate random price path + const z = boxMullerTransform(); + const finalPrice = spotPrice * Math.exp( + (riskFreeRate - dividendYield - 0.5 * volatility * volatility) * timeToExpiry + + volatility * Math.sqrt(timeToExpiry) * z + ); + + // Calculate payoff + const payoff = optionType === 'call' ? + Math.max(finalPrice - strikePrice, 0) : + Math.max(strikePrice - finalPrice, 0); + + totalPayoff += payoff; + } + + const averagePayoff = totalPayoff / numSimulations; + const price = averagePayoff * Math.exp(-riskFreeRate * timeToExpiry); + + const intrinsicValue = optionType === 'call' ? 
+ Math.max(spotPrice - strikePrice, 0) : + Math.max(strikePrice - spotPrice, 0); + const timeValue = price - intrinsicValue; + + if (optionType === 'call') { + return { + callPrice: price, + putPrice: 0, + intrinsicValueCall: intrinsicValue, + intrinsicValuePut: 0, + timeValueCall: timeValue, + timeValuePut: 0 + }; + } else { + return { + callPrice: 0, + putPrice: price, + intrinsicValueCall: 0, + intrinsicValuePut: intrinsicValue, + timeValueCall: 0, + timeValuePut: timeValue + }; + } +} + +/** + * Calculate option portfolio risk metrics + */ +export function calculateOptionPortfolioRisk( + positions: Array<{ + optionType: 'call' | 'put'; + quantity: number; + params: OptionParameters; + }> +): { + totalDelta: number; + totalGamma: number; + totalTheta: number; + totalVega: number; + totalRho: number; + portfolioValue: number; +} { + let totalDelta = 0; + let totalGamma = 0; + let totalTheta = 0; + let totalVega = 0; + let totalRho = 0; + let portfolioValue = 0; + + for (const position of positions) { + const greeks = calculateGreeks(position.params, position.optionType); + const pricing = blackScholes(position.params); + const optionPrice = position.optionType === 'call' ? 
pricing.callPrice : pricing.putPrice; + + totalDelta += greeks.delta * position.quantity; + totalGamma += greeks.gamma * position.quantity; + totalTheta += greeks.theta * position.quantity; + totalVega += greeks.vega * position.quantity; + totalRho += greeks.rho * position.quantity; + portfolioValue += optionPrice * position.quantity; + } + + return { + totalDelta, + totalGamma, + totalTheta, + totalVega, + totalRho, + portfolioValue + }; +} + +/** + * Volatility surface interpolation + */ +export function interpolateVolatilitySurface( + strikes: number[], + expiries: number[], + volatilities: number[][], + targetStrike: number, + targetExpiry: number +): number { + // Simplified bilinear interpolation + // In production, use more sophisticated interpolation methods + + // Find surrounding points + let strikeIndex = 0; + let expiryIndex = 0; + + for (let i = 0; i < strikes.length - 1; i++) { + if (targetStrike >= strikes[i] && targetStrike <= strikes[i + 1]) { + strikeIndex = i; + break; + } + } + + for (let i = 0; i < expiries.length - 1; i++) { + if (targetExpiry >= expiries[i] && targetExpiry <= expiries[i + 1]) { + expiryIndex = i; + break; + } + } + + // Bilinear interpolation + const x1 = strikes[strikeIndex]; + const x2 = strikes[strikeIndex + 1]; + const y1 = expiries[expiryIndex]; + const y2 = expiries[expiryIndex + 1]; + + const q11 = volatilities[expiryIndex][strikeIndex]; + const q12 = volatilities[expiryIndex + 1][strikeIndex]; + const q21 = volatilities[expiryIndex][strikeIndex + 1]; + const q22 = volatilities[expiryIndex + 1][strikeIndex + 1]; + + const wx = (targetStrike - x1) / (x2 - x1); + const wy = (targetExpiry - y1) / (y2 - y1); + + return q11 * (1 - wx) * (1 - wy) + + q21 * wx * (1 - wy) + + q12 * (1 - wx) * wy + + q22 * wx * wy; +} + +// Helper functions + +/** + * Normal cumulative distribution function + */ +function normalCDF(x: number): number { + return 0.5 * (1 + erf(x / Math.sqrt(2))); +} + +/** + * Normal probability density function 
+ */ +function normalPDF(x: number): number { + return Math.exp(-0.5 * x * x) / Math.sqrt(2 * Math.PI); +} + +/** + * Error function approximation + */ +function erf(x: number): number { + // Abramowitz and Stegun approximation + const a1 = 0.254829592; + const a2 = -0.284496736; + const a3 = 1.421413741; + const a4 = -1.453152027; + const a5 = 1.061405429; + const p = 0.3275911; + + const sign = x >= 0 ? 1 : -1; + x = Math.abs(x); + + const t = 1.0 / (1.0 + p * x); + const y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * Math.exp(-x * x); + + return sign * y; +} + +/** + * Box-Muller transformation for normal random numbers + */ +function boxMullerTransform(): number { + let u1 = Math.random(); + let u2 = Math.random(); + + // Ensure u1 is not zero + while (u1 === 0) { + u1 = Math.random(); + } + + return Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2); +} + +/** + * Prices a straddle option strategy + */ +export function straddle(params: OptionParameters): { callPrice: number; putPrice: number; strategyCost: number } { + const callOption = blackScholes(params); + const putOption = blackScholes(params); + const strategyCost = callOption.callPrice + putOption.putPrice; + + return { + callPrice: callOption.callPrice, + putPrice: putOption.putPrice, + strategyCost: strategyCost + }; +} + +/** + * Prices a strangle option strategy + */ +export function strangle(callParams: OptionParameters, putParams: OptionParameters): { callPrice: number; putPrice: number; strategyCost: number } { + const callOption = blackScholes(callParams); + const putOption = blackScholes(putParams); + const strategyCost = callOption.callPrice + putOption.putPrice; + + return { + callPrice: callOption.callPrice, + putPrice: putOption.putPrice, + strategyCost: strategyCost + }; +} + +/** + * Prices a butterfly option strategy + */ +export function butterfly( + lowerStrikeParams: OptionParameters, + middleStrikeParams: OptionParameters, + upperStrikeParams: 
OptionParameters +): { + lowerCallPrice: number; + middleCallPrice: number; + upperCallPrice: number; + strategyCost: number; +} { + const lowerCall = blackScholes(lowerStrikeParams); + const middleCall = blackScholes(middleStrikeParams); + const upperCall = blackScholes(upperStrikeParams); + + const strategyCost = lowerCall.callPrice - 2 * middleCall.callPrice + upperCall.callPrice; + + return { + lowerCallPrice: lowerCall.callPrice, + middleCallPrice: middleCall.callPrice, + upperCallPrice: upperCall.callPrice, + strategyCost: strategyCost + }; +} + +/** + * Prices a condor option strategy + */ +export function condor( + lowerStrikeParams: OptionParameters, + middleLowerStrikeParams: OptionParameters, + middleUpperStrikeParams: OptionParameters, + upperStrikeParams: OptionParameters +): { + lowerCallPrice: number; + middleLowerCallPrice: number; + middleUpperCallPrice: number; + upperCallPrice: number; + strategyCost: number; +} { + const lowerCall = blackScholes(lowerStrikeParams); + const middleLowerCall = blackScholes(middleLowerStrikeParams); + const middleUpperCall = blackScholes(middleUpperStrikeParams); + const upperCall = blackScholes(upperStrikeParams); + + const strategyCost = lowerCall.callPrice - middleLowerCall.callPrice - middleUpperCall.callPrice + upperCall.callPrice; + + return { + lowerCallPrice: lowerCall.callPrice, + middleLowerCallPrice: middleLowerCall.callPrice, + middleUpperCallPrice: middleUpperCall.callPrice, + upperCallPrice: upperCall.callPrice, + strategyCost: strategyCost + }; +} + +/** + * Calculates combined Greeks for an option strategy + */ +export function calculateStrategyGreeks( + positions: Array<{ + optionType: 'call' | 'put'; + quantity: number; + params: OptionParameters; + }> +): GreeksCalculation { + let totalDelta = 0; + let totalGamma = 0; + let totalTheta = 0; + let totalVega = 0; + let totalRho = 0; + + for (const position of positions) { + const greeks = calculateGreeks(position.params, position.optionType); + + 
totalDelta += greeks.delta * position.quantity; + totalGamma += greeks.gamma * position.quantity; + totalTheta += greeks.theta * position.quantity; + totalVega += greeks.vega * position.quantity; + totalRho += greeks.rho * position.quantity; + } + + return { + delta: totalDelta, + gamma: totalGamma, + theta: totalTheta, + vega: totalVega, + rho: totalRho + }; +} + +/** + * Black-Scholes option pricing model with greeks + */ +export function blackScholesWithGreeks(params: OptionParameters, optionType: 'call' | 'put' = 'call'): { pricing: OptionPricing; greeks: GreeksCalculation } { + const pricing = blackScholes(params); + const greeks = calculateGreeks(params, optionType); + return { pricing, greeks }; +} + +/** + * Calculates the breakeven point for a call option at expiration + */ +export function callBreakeven(strikePrice: number, callPrice: number): number { + return strikePrice + callPrice; +} + +/** + * Calculates the breakeven point for a put option at expiration + */ +export function putBreakeven(strikePrice: number, putPrice: number): number { + return strikePrice - putPrice; +} + +/** + * Estimates the probability of profit for a call option at expiration + */ +export function callProbabilityOfProfit(spotPrice: number, strikePrice: number, timeToExpiry: number, riskFreeRate: number, volatility: number): number { + const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate + 0.5 * volatility * volatility) * timeToExpiry) / (volatility * Math.sqrt(timeToExpiry)); + return normalCDF(d1); +} + +/** + * Estimates the probability of profit for a put option at expiration + */ +export function putProbabilityOfProfit(spotPrice: number, strikePrice: number, timeToExpiry: number, riskFreeRate: number, volatility: number): number { + const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate + 0.5 * volatility * volatility) * timeToExpiry) / (volatility * Math.sqrt(timeToExpiry)); + return 1 - normalCDF(d1); } \ No newline at end of file diff --git 
a/libs/utils/src/calculations/performance-metrics.ts b/libs/utils/src/calculations/performance-metrics.ts index 5dab08d..4808d44 100644 --- a/libs/utils/src/calculations/performance-metrics.ts +++ b/libs/utils/src/calculations/performance-metrics.ts @@ -1,756 +1,756 @@ -/** - * Performance Metrics and Analysis - * Comprehensive performance measurement tools for trading strategies and portfolios - */ - -import { PortfolioMetrics, ulcerIndex } from './index'; - -export interface TradePerformance { - totalTrades: number; - winningTrades: number; - losingTrades: number; - winRate: number; - averageWin: number; - averageLoss: number; - largestWin: number; - largestLoss: number; - profitFactor: number; - expectancy: number; - averageTradeReturn: number; - consecutiveWins: number; - consecutiveLosses: number; -} - -export interface DrawdownAnalysis { - maxDrawdown: number; - maxDrawdownDuration: number; - averageDrawdown: number; - drawdownPeriods: Array<{ - start: Date; - end: Date; - duration: number; - magnitude: number; - }>; -} - -export interface ReturnAnalysis { - totalReturn: number; - annualizedReturn: number; - compoundAnnualGrowthRate: number; - volatility: number; - annualizedVolatility: number; - skewness: number; - kurtosis: number; - bestMonth: number; - worstMonth: number; - positiveMonths: number; - negativeMonths: number; -} - -/** - * Calculate comprehensive trade performance metrics - */ -export function analyzeTradePerformance(trades: Array<{ pnl: number; date: Date }>): TradePerformance { - if (trades.length === 0) { - return { - totalTrades: 0, - winningTrades: 0, - losingTrades: 0, - winRate: 0, - averageWin: 0, - averageLoss: 0, - largestWin: 0, - largestLoss: 0, - profitFactor: 0, - expectancy: 0, - averageTradeReturn: 0, - consecutiveWins: 0, - consecutiveLosses: 0 - }; - } - - const winningTrades = trades.filter(trade => trade.pnl > 0); - const losingTrades = trades.filter(trade => trade.pnl < 0); - - const totalWins = 
winningTrades.reduce((sum, trade) => sum + trade.pnl, 0); - const totalLosses = Math.abs(losingTrades.reduce((sum, trade) => sum + trade.pnl, 0)); - - const averageWin = winningTrades.length > 0 ? totalWins / winningTrades.length : 0; - const averageLoss = losingTrades.length > 0 ? totalLosses / losingTrades.length : 0; - - const largestWin = winningTrades.length > 0 ? Math.max(...winningTrades.map(t => t.pnl)) : 0; - const largestLoss = losingTrades.length > 0 ? Math.min(...losingTrades.map(t => t.pnl)) : 0; - - const profitFactor = totalLosses > 0 ? totalWins / totalLosses : totalWins > 0 ? Infinity : 0; - const winRate = winningTrades.length / trades.length; - const expectancy = (winRate * averageWin) - ((1 - winRate) * averageLoss); - - const totalPnL = trades.reduce((sum, trade) => sum + trade.pnl, 0); - const averageTradeReturn = totalPnL / trades.length; - - // Calculate consecutive wins/losses - let consecutiveWins = 0; - let consecutiveLosses = 0; - let currentWinStreak = 0; - let currentLossStreak = 0; - - for (const trade of trades) { - if (trade.pnl > 0) { - currentWinStreak++; - currentLossStreak = 0; - consecutiveWins = Math.max(consecutiveWins, currentWinStreak); - } else if (trade.pnl < 0) { - currentLossStreak++; - currentWinStreak = 0; - consecutiveLosses = Math.max(consecutiveLosses, currentLossStreak); - } - } - - return { - totalTrades: trades.length, - winningTrades: winningTrades.length, - losingTrades: losingTrades.length, - winRate, - averageWin, - averageLoss, - largestWin, - largestLoss, - profitFactor, - expectancy, - averageTradeReturn, - consecutiveWins, - consecutiveLosses - }; -} - -/** - * Analyze drawdown characteristics - */ -export function analyzeDrawdowns(equityCurve: Array<{ value: number; date: Date }>): DrawdownAnalysis { - if (equityCurve.length < 2) { - return { - maxDrawdown: 0, - maxDrawdownDuration: 0, - averageDrawdown: 0, - drawdownPeriods: [] - }; - } - - let peak = equityCurve[0].value; - let peakDate = 
equityCurve[0].date; - let maxDrawdown = 0; - let maxDrawdownDuration = 0; - - const drawdownPeriods: Array<{ - start: Date; - end: Date; - duration: number; - magnitude: number; - }> = []; - - let currentDrawdownStart: Date | null = null; - let drawdowns: number[] = []; - - for (let i = 1; i < equityCurve.length; i++) { - const current = equityCurve[i]; - - if (current.value > peak) { - // New peak - end any current drawdown - if (currentDrawdownStart) { - const drawdownMagnitude = (peak - equityCurve[i - 1].value) / peak; - const duration = Math.floor((equityCurve[i - 1].date.getTime() - currentDrawdownStart.getTime()) / (1000 * 60 * 60 * 24)); - - drawdownPeriods.push({ - start: currentDrawdownStart, - end: equityCurve[i - 1].date, - duration, - magnitude: drawdownMagnitude - }); - - drawdowns.push(drawdownMagnitude); - maxDrawdownDuration = Math.max(maxDrawdownDuration, duration); - currentDrawdownStart = null; - } - - peak = current.value; - peakDate = current.date; - } else { - // In drawdown - if (!currentDrawdownStart) { - currentDrawdownStart = peakDate; - } - - const drawdown = (peak - current.value) / peak; - maxDrawdown = Math.max(maxDrawdown, drawdown); - } - } - - // Handle ongoing drawdown - if (currentDrawdownStart) { - const lastPoint = equityCurve[equityCurve.length - 1]; - const drawdownMagnitude = (peak - lastPoint.value) / peak; - const duration = Math.floor((lastPoint.date.getTime() - currentDrawdownStart.getTime()) / (1000 * 60 * 60 * 24)); - - drawdownPeriods.push({ - start: currentDrawdownStart, - end: lastPoint.date, - duration, - magnitude: drawdownMagnitude - }); - - drawdowns.push(drawdownMagnitude); - maxDrawdownDuration = Math.max(maxDrawdownDuration, duration); - } - - const averageDrawdown = drawdowns.length > 0 ? 
drawdowns.reduce((sum, dd) => sum + dd, 0) / drawdowns.length : 0; - - return { - maxDrawdown, - maxDrawdownDuration, - averageDrawdown, - drawdownPeriods - }; -} - -/** - * Analyze return characteristics - */ -export function analyzeReturns( - returns: Array<{ return: number; date: Date }>, - periodsPerYear: number = 252 -): ReturnAnalysis { - if (returns.length === 0) { - return { - totalReturn: 0, - annualizedReturn: 0, - compoundAnnualGrowthRate: 0, - volatility: 0, - annualizedVolatility: 0, - skewness: 0, - kurtosis: 0, - bestMonth: 0, - worstMonth: 0, - positiveMonths: 0, - negativeMonths: 0 - }; - } - - const returnValues = returns.map(r => r.return); - - // Calculate basic statistics - const totalReturn = returnValues.reduce((product, ret) => product * (1 + ret), 1) - 1; - const averageReturn = returnValues.reduce((sum, ret) => sum + ret, 0) / returnValues.length; - const annualizedReturn = Math.pow(1 + averageReturn, periodsPerYear) - 1; - - // Calculate CAGR - const years = returns.length / periodsPerYear; - const cagr = years > 0 ? Math.pow(1 + totalReturn, 1 / years) - 1 : 0; - - // Calculate volatility - const variance = returnValues.reduce((sum, ret) => sum + Math.pow(ret - averageReturn, 2), 0) / (returnValues.length - 1); - const volatility = Math.sqrt(variance); - const annualizedVolatility = volatility * Math.sqrt(periodsPerYear); - - // Calculate skewness and kurtosis - const skewness = calculateSkewness(returnValues); - const kurtosis = calculateKurtosis(returnValues); - - // Monthly analysis - const monthlyReturns = aggregateMonthlyReturns(returns); - const bestMonth = monthlyReturns.length > 0 ? Math.max(...monthlyReturns) : 0; - const worstMonth = monthlyReturns.length > 0 ? 
Math.min(...monthlyReturns) : 0; - const positiveMonths = monthlyReturns.filter(ret => ret > 0).length; - const negativeMonths = monthlyReturns.filter(ret => ret < 0).length; - - return { - totalReturn, - annualizedReturn, - compoundAnnualGrowthRate: cagr, - volatility, - annualizedVolatility, - skewness, - kurtosis, - bestMonth, - worstMonth, - positiveMonths, - negativeMonths - }; -} - -/** - * Calculate rolling performance metrics - */ -export function calculateRollingMetrics( - returns: number[], - windowSize: number, - metricType: 'sharpe' | 'volatility' | 'return' = 'sharpe' -): number[] { - if (returns.length < windowSize) return []; - - const rollingMetrics: number[] = []; - - for (let i = windowSize - 1; i < returns.length; i++) { - const window = returns.slice(i - windowSize + 1, i + 1); - - switch (metricType) { - case 'sharpe': - rollingMetrics.push(calculateSharpeRatio(window)); - break; - case 'volatility': - rollingMetrics.push(calculateVolatility(window)); - break; - case 'return': - const avgReturn = window.reduce((sum, ret) => sum + ret, 0) / window.length; - rollingMetrics.push(avgReturn); - break; - } - } - - return rollingMetrics; -} - -/** - * Calculate performance attribution - */ -export function strategyPerformanceAttribution( - portfolioReturns: number[], - benchmarkReturns: number[], - sectorWeights: number[], - sectorReturns: number[] -): { - allocationEffect: number; - selectionEffect: number; - interactionEffect: number; - totalActiveReturn: number; -} { - if (portfolioReturns.length !== benchmarkReturns.length) { - throw new Error('Portfolio and benchmark returns must have same length'); - } - - const portfolioReturn = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length; - const benchmarkReturn = benchmarkReturns.reduce((sum, ret) => sum + ret, 0) / benchmarkReturns.length; - - let allocationEffect = 0; - let selectionEffect = 0; - let interactionEffect = 0; - - for (let i = 0; i < sectorWeights.length; i++) 
{ - const portfolioWeight = sectorWeights[i]; - const benchmarkWeight = 1 / sectorWeights.length; // Assuming equal benchmark weights - const sectorReturn = sectorReturns[i]; - - // Allocation effect: (portfolio weight - benchmark weight) * (benchmark sector return - benchmark return) - allocationEffect += (portfolioWeight - benchmarkWeight) * (sectorReturn - benchmarkReturn); - - // Selection effect: benchmark weight * (portfolio sector return - benchmark sector return) - selectionEffect += benchmarkWeight * (sectorReturn - sectorReturn); // Simplified - - // Interaction effect: (portfolio weight - benchmark weight) * (portfolio sector return - benchmark sector return) - interactionEffect += (portfolioWeight - benchmarkWeight) * (sectorReturn - sectorReturn); // Simplified - } - - const totalActiveReturn = portfolioReturn - benchmarkReturn; - - return { - allocationEffect, - selectionEffect, - interactionEffect, - totalActiveReturn - }; -} - -/** - * Calculate Omega ratio - */ -export function omegaRatio(returns: number[], threshold: number = 0): number { - if (returns.length === 0) return 0; - - const gains = returns.filter(ret => ret > threshold).reduce((sum, ret) => sum + (ret - threshold), 0); - const losses = returns.filter(ret => ret < threshold).reduce((sum, ret) => sum + Math.abs(ret - threshold), 0); - - return losses === 0 ? Infinity : gains / losses; -} - -/** - * Calculate gain-to-pain ratio - */ -export function gainToPainRatio(returns: number[]): number { - if (returns.length === 0) return 0; - - const totalGain = returns.reduce((sum, ret) => sum + ret, 0); - const totalPain = returns.filter(ret => ret < 0).reduce((sum, ret) => sum + Math.abs(ret), 0); - - return totalPain === 0 ? (totalGain > 0 ? 
Infinity : 0) : totalGain / totalPain; -} - -/** - * Calculate Martin ratio (modified Sharpe with downside deviation) - */ -export function martinRatio(returns: number[], riskFreeRate: number = 0): number { - if (returns.length === 0) return 0; - - const averageReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const downsideReturns = returns.filter(ret => ret < riskFreeRate); - - if (downsideReturns.length === 0) return Infinity; - - const downsideDeviation = Math.sqrt( - downsideReturns.reduce((sum, ret) => sum + Math.pow(ret - riskFreeRate, 2), 0) / returns.length - ); - - return downsideDeviation === 0 ? Infinity : (averageReturn - riskFreeRate) / downsideDeviation; -} - -/** - * Calculate comprehensive portfolio metrics - */ -export function calculateStrategyMetrics( - equityCurve: Array<{ value: number; date: Date }>, - benchmarkReturns?: number[], - riskFreeRate: number = 0.02 -): PortfolioMetrics { - if (equityCurve.length < 2) { - return { - totalValue: 0, - totalReturn: 0, - totalReturnPercent: 0, - dailyReturn: 0, - dailyReturnPercent: 0, - maxDrawdown: 0, - sharpeRatio: 0, - beta: 0, - alpha: 0, - volatility: 0 - }; - } - - const returns = []; - for (let i = 1; i < equityCurve.length; i++) { - const ret = (equityCurve[i].value - equityCurve[i - 1].value) / equityCurve[i - 1].value; - returns.push(ret); - } - - const totalValue = equityCurve[equityCurve.length - 1].value; - const totalReturn = totalValue - equityCurve[0].value; - const totalReturnPercent = (totalReturn / equityCurve[0].value) * 100; - - const dailyReturn = returns[returns.length - 1]; - const dailyReturnPercent = dailyReturn * 100; - - const maxDrawdown = analyzeDrawdowns(equityCurve).maxDrawdown; - const sharpeRatio = calculateSharpeRatio(returns, riskFreeRate); - const volatility = calculateVolatility(returns); - - let beta = 0; - let alpha = 0; - - if (benchmarkReturns && benchmarkReturns.length === returns.length) { - beta = calculateBeta(returns, 
benchmarkReturns); - alpha = calculateAlpha(returns, benchmarkReturns, riskFreeRate); - } - - return { - totalValue, - totalReturn, - totalReturnPercent, - dailyReturn, - dailyReturnPercent, - maxDrawdown, - sharpeRatio, - beta, - alpha, - volatility - }; -} - -/** - * Calculate Calmar Ratio - */ -export function calmarRatio(returns: number[], equityCurve: Array<{ value: number; date: Date }>, riskFreeRate: number = 0): number { - const maxDrawdown = analyzeDrawdowns(equityCurve).maxDrawdown; - const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - - return maxDrawdown === 0 ? 0 : (avgReturn - riskFreeRate) / maxDrawdown; -} - -/** - * Calculate Sterling Ratio - */ -export function sterlingRatio(returns: number[], equityCurve: Array<{ value: number; date: Date }>, riskFreeRate: number = 0): number { - const averageDrawdown = analyzeDrawdowns(equityCurve).averageDrawdown; - const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - - return averageDrawdown === 0 ? 0 : (avgReturn - riskFreeRate) / averageDrawdown; -} - - -/** - * Calculate Information Ratio - */ -export function informationRatio(portfolioReturns: number[], benchmarkReturns: number[]): number { - if (portfolioReturns.length !== benchmarkReturns.length) { - throw new Error("Portfolio and benchmark returns must have the same length."); - } - - const excessReturns = portfolioReturns.map((portfolioReturn, index) => portfolioReturn - benchmarkReturns[index]); - const trackingError = calculateVolatility(excessReturns); - const avgExcessReturn = excessReturns.reduce((sum, ret) => sum + ret, 0) / excessReturns.length; - - return trackingError === 0 ? 
0 : avgExcessReturn / trackingError; -} - -/** - * Calculate Treynor Ratio - */ -export function treynorRatio(portfolioReturns: number[], marketReturns: number[], riskFreeRate: number): number { - const beta = calculateBeta(portfolioReturns, marketReturns); - const avgPortfolioReturn = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length; - - return beta === 0 ? 0 : (avgPortfolioReturn - riskFreeRate) / beta; -} - -/** - * Calculate Jensen's Alpha (same as Alpha, but included for clarity) - */ -export function jensensAlpha(portfolioReturns: number[], marketReturns: number[], riskFreeRate: number): number { - return calculateAlpha(portfolioReturns, marketReturns, riskFreeRate); -} - -/** - * Calculate Capture Ratio (Up Capture and Down Capture) - */ -export function captureRatio(portfolioReturns: number[], benchmarkReturns: number[]): { upCaptureRatio: number; downCaptureRatio: number } { - let upCapture = 0; - let downCapture = 0; - let upMarketPeriods = 0; - let downMarketPeriods = 0; - - for (let i = 0; i < portfolioReturns.length; i++) { - if (benchmarkReturns[i] > 0) { - upCapture += portfolioReturns[i]; - upMarketPeriods++; - } else if (benchmarkReturns[i] < 0) { - downCapture += portfolioReturns[i]; - downMarketPeriods++; - } - } - - const upCaptureRatio = upMarketPeriods > 0 ? (upCapture / upMarketPeriods) / (benchmarkReturns.filter(r => r > 0).reduce((sum, r) => sum + r, 0) / upMarketPeriods) : 0; - const downCaptureRatio = downMarketPeriods > 0 ? 
(downCapture / downMarketPeriods) / (benchmarkReturns.filter(r => r < 0).reduce((sum, r) => sum + r, 0) / downMarketPeriods) : 0; - - return { upCaptureRatio, downCaptureRatio }; -} - -/** - * Calculate Sortino Ratio - */ -export function sortinoRatio(returns: number[], riskFreeRate: number = 0): number { - const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const downsideReturns = returns.filter(ret => ret < riskFreeRate); - const downsideDeviation = Math.sqrt( - downsideReturns.reduce((sum, ret) => sum + Math.pow(ret - riskFreeRate, 2), 0) / returns.length - ); - - return downsideDeviation === 0 ? 0 : (avgReturn - riskFreeRate) / downsideDeviation; -} - -/** - * Calculate Tail Ratio - */ -export function tailRatio(returns: number[], tailPercent: number = 0.1): number { - const numReturns = returns.length; - const tailSize = Math.floor(numReturns * tailPercent); - - if (tailSize === 0) return 0; - - const sortedReturns = [...returns].sort((a, b) => a - b); - const worstTail = sortedReturns.slice(0, tailSize); - const bestTail = sortedReturns.slice(numReturns - tailSize); - - const avgWorst = worstTail.reduce((sum, ret) => sum + ret, 0) / tailSize; - const avgBest = bestTail.reduce((sum, ret) => sum + ret, 0) / tailSize; - - return avgWorst === 0 ? 
0 : avgBest / Math.abs(avgWorst); -} - -/** - * Calculate Rolling Beta - */ -export function calculateRollingBeta(portfolioReturns: number[], marketReturns: number[], windowSize: number): number[] { - if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < windowSize) return []; - - const rollingBetas: number[] = []; - - for (let i = windowSize; i <= portfolioReturns.length; i++) { - const portfolioWindow = portfolioReturns.slice(i - windowSize, i); - const marketWindow = marketReturns.slice(i - windowSize, i); - rollingBetas.push(calculateBeta(portfolioWindow, marketWindow)); - } - - return rollingBetas; -} - -/** - * Calculate Ulcer Performance Index (UPI) - */ -export function ulcerPerformanceIndex(returns: number[], equityCurve: Array<{ value: number; date: Date }>, riskFreeRate: number = 0): number { - const ui = ulcerIndex(equityCurve); - const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - - return ui === 0 ? 0 : (avgReturn - riskFreeRate) / ui; -} - - -/** - * Calculate Rolling Alpha - */ -export function calculateRollingAlpha(portfolioReturns: number[], marketReturns: number[], riskFreeRate: number, windowSize: number): number[] { - if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < windowSize) return []; - - const rollingAlphas: number[] = []; - - for (let i = windowSize; i <= portfolioReturns.length; i++) { - const portfolioWindow = portfolioReturns.slice(i - windowSize, i); - const marketWindow = marketReturns.slice(i - windowSize, i); - rollingAlphas.push(calculateAlpha(portfolioWindow, marketWindow, riskFreeRate)); - } - - return rollingAlphas; -} - -/** - * Calculate Time Weighted Rate of Return (TWRR) - */ -export function timeWeightedRateOfReturn(cashFlows: Array<{ amount: number; date: Date; value: number }>): number { - let totalReturn = 1; - let previousValue = cashFlows[0].value; - - for (let i = 1; i < cashFlows.length; i++) { - const current = cashFlows[i]; - 
const periodReturn = (current.value - previousValue - current.amount) / (previousValue + current.amount); - totalReturn *= (1 + periodReturn); - previousValue = current.value; - } - - return totalReturn - 1; -} - -/** - * Calculate Money Weighted Rate of Return (MWRR) - Approximation using IRR - */ -export function moneyWeightedRateOfReturn(cashFlows: Array<{ amount: number; date: Date; value: number }>): number { - // Approximate MWRR using Internal Rate of Return (IRR) - // This requires a numerical method or library for accurate IRR calculation - // This is a simplified example and may not be accurate for all cases - - let totalCashFlow = 0; - let totalWeightedCashFlow = 0; - const startDate = cashFlows[0].date.getTime(); - - for (const cf of cashFlows) { - const timeDiff = (cf.date.getTime() - startDate) / (1000 * 60 * 60 * 24 * 365); // Years - totalCashFlow += cf.amount; - totalWeightedCashFlow += cf.amount * timeDiff; - } - - // Simplified approximation: MWRR β‰ˆ totalCashFlow / totalWeightedCashFlow - 1 - return totalCashFlow / totalWeightedCashFlow - 1; -} - -// Helper functions - -function calculateSharpeRatio(returns: number[], riskFreeRate: number = 0): number { - if (returns.length < 2) return 0; - - const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - avgReturn, 2), 0) / (returns.length - 1); - const stdDev = Math.sqrt(variance); - - return stdDev === 0 ? 
0 : (avgReturn - riskFreeRate) / stdDev; -} - -function calculateVolatility(returns: number[]): number { - if (returns.length < 2) return 0; - - const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1); - - return Math.sqrt(variance); -} - -function calculateBeta(portfolioReturns: number[], marketReturns: number[]): number { - if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < 2) return 0; - - const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length; - const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length; - - let covariance = 0; - let marketVariance = 0; - - for (let i = 0; i < portfolioReturns.length; i++) { - const portfolioDiff = portfolioReturns[i] - portfolioMean; - const marketDiff = marketReturns[i] - marketMean; - - covariance += portfolioDiff * marketDiff; - marketVariance += marketDiff * marketDiff; - } - - covariance /= (portfolioReturns.length - 1); - marketVariance /= (marketReturns.length - 1); - - return marketVariance === 0 ? 
0 : covariance / marketVariance; -} - -function calculateAlpha( - portfolioReturns: number[], - marketReturns: number[], - riskFreeRate: number -): number { - const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length; - const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length; - const beta = calculateBeta(portfolioReturns, marketReturns); - - return portfolioMean - (riskFreeRate + beta * (marketMean - riskFreeRate)); -} - -function calculateSkewness(returns: number[]): number { - if (returns.length < 3) return 0; - - const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length; - const stdDev = Math.sqrt(variance); - - if (stdDev === 0) return 0; - - const skew = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 3), 0) / returns.length; - - return skew; -} - -function calculateKurtosis(returns: number[]): number { - if (returns.length < 4) return 0; - - const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length; - const stdDev = Math.sqrt(variance); - - if (stdDev === 0) return 0; - - const kurt = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 4), 0) / returns.length; - - return kurt - 3; // Excess kurtosis -} - -function aggregateMonthlyReturns(returns: Array<{ return: number; date: Date }>): number[] { - const monthlyReturns: { [key: string]: number } = {}; - - for (const ret of returns) { - const monthKey = `${ret.date.getFullYear()}-${ret.date.getMonth()}`; - if (!monthlyReturns[monthKey]) { - monthlyReturns[monthKey] = 1; - } - monthlyReturns[monthKey] *= (1 + ret.return); - } - - return Object.values(monthlyReturns).map(cumReturn => cumReturn - 1); -} +/** + * Performance Metrics and Analysis + * Comprehensive performance 
measurement tools for trading strategies and portfolios + */ + +import { PortfolioMetrics, ulcerIndex } from './index'; + +export interface TradePerformance { + totalTrades: number; + winningTrades: number; + losingTrades: number; + winRate: number; + averageWin: number; + averageLoss: number; + largestWin: number; + largestLoss: number; + profitFactor: number; + expectancy: number; + averageTradeReturn: number; + consecutiveWins: number; + consecutiveLosses: number; +} + +export interface DrawdownAnalysis { + maxDrawdown: number; + maxDrawdownDuration: number; + averageDrawdown: number; + drawdownPeriods: Array<{ + start: Date; + end: Date; + duration: number; + magnitude: number; + }>; +} + +export interface ReturnAnalysis { + totalReturn: number; + annualizedReturn: number; + compoundAnnualGrowthRate: number; + volatility: number; + annualizedVolatility: number; + skewness: number; + kurtosis: number; + bestMonth: number; + worstMonth: number; + positiveMonths: number; + negativeMonths: number; +} + +/** + * Calculate comprehensive trade performance metrics + */ +export function analyzeTradePerformance(trades: Array<{ pnl: number; date: Date }>): TradePerformance { + if (trades.length === 0) { + return { + totalTrades: 0, + winningTrades: 0, + losingTrades: 0, + winRate: 0, + averageWin: 0, + averageLoss: 0, + largestWin: 0, + largestLoss: 0, + profitFactor: 0, + expectancy: 0, + averageTradeReturn: 0, + consecutiveWins: 0, + consecutiveLosses: 0 + }; + } + + const winningTrades = trades.filter(trade => trade.pnl > 0); + const losingTrades = trades.filter(trade => trade.pnl < 0); + + const totalWins = winningTrades.reduce((sum, trade) => sum + trade.pnl, 0); + const totalLosses = Math.abs(losingTrades.reduce((sum, trade) => sum + trade.pnl, 0)); + + const averageWin = winningTrades.length > 0 ? totalWins / winningTrades.length : 0; + const averageLoss = losingTrades.length > 0 ? 
totalLosses / losingTrades.length : 0; + + const largestWin = winningTrades.length > 0 ? Math.max(...winningTrades.map(t => t.pnl)) : 0; + const largestLoss = losingTrades.length > 0 ? Math.min(...losingTrades.map(t => t.pnl)) : 0; + + const profitFactor = totalLosses > 0 ? totalWins / totalLosses : totalWins > 0 ? Infinity : 0; + const winRate = winningTrades.length / trades.length; + const expectancy = (winRate * averageWin) - ((1 - winRate) * averageLoss); + + const totalPnL = trades.reduce((sum, trade) => sum + trade.pnl, 0); + const averageTradeReturn = totalPnL / trades.length; + + // Calculate consecutive wins/losses + let consecutiveWins = 0; + let consecutiveLosses = 0; + let currentWinStreak = 0; + let currentLossStreak = 0; + + for (const trade of trades) { + if (trade.pnl > 0) { + currentWinStreak++; + currentLossStreak = 0; + consecutiveWins = Math.max(consecutiveWins, currentWinStreak); + } else if (trade.pnl < 0) { + currentLossStreak++; + currentWinStreak = 0; + consecutiveLosses = Math.max(consecutiveLosses, currentLossStreak); + } + } + + return { + totalTrades: trades.length, + winningTrades: winningTrades.length, + losingTrades: losingTrades.length, + winRate, + averageWin, + averageLoss, + largestWin, + largestLoss, + profitFactor, + expectancy, + averageTradeReturn, + consecutiveWins, + consecutiveLosses + }; +} + +/** + * Analyze drawdown characteristics + */ +export function analyzeDrawdowns(equityCurve: Array<{ value: number; date: Date }>): DrawdownAnalysis { + if (equityCurve.length < 2) { + return { + maxDrawdown: 0, + maxDrawdownDuration: 0, + averageDrawdown: 0, + drawdownPeriods: [] + }; + } + + let peak = equityCurve[0].value; + let peakDate = equityCurve[0].date; + let maxDrawdown = 0; + let maxDrawdownDuration = 0; + + const drawdownPeriods: Array<{ + start: Date; + end: Date; + duration: number; + magnitude: number; + }> = []; + + let currentDrawdownStart: Date | null = null; + let drawdowns: number[] = []; + + for (let i = 1; i < 
equityCurve.length; i++) { + const current = equityCurve[i]; + + if (current.value > peak) { + // New peak - end any current drawdown + if (currentDrawdownStart) { + const drawdownMagnitude = (peak - equityCurve[i - 1].value) / peak; + const duration = Math.floor((equityCurve[i - 1].date.getTime() - currentDrawdownStart.getTime()) / (1000 * 60 * 60 * 24)); + + drawdownPeriods.push({ + start: currentDrawdownStart, + end: equityCurve[i - 1].date, + duration, + magnitude: drawdownMagnitude + }); + + drawdowns.push(drawdownMagnitude); + maxDrawdownDuration = Math.max(maxDrawdownDuration, duration); + currentDrawdownStart = null; + } + + peak = current.value; + peakDate = current.date; + } else { + // In drawdown + if (!currentDrawdownStart) { + currentDrawdownStart = peakDate; + } + + const drawdown = (peak - current.value) / peak; + maxDrawdown = Math.max(maxDrawdown, drawdown); + } + } + + // Handle ongoing drawdown + if (currentDrawdownStart) { + const lastPoint = equityCurve[equityCurve.length - 1]; + const drawdownMagnitude = (peak - lastPoint.value) / peak; + const duration = Math.floor((lastPoint.date.getTime() - currentDrawdownStart.getTime()) / (1000 * 60 * 60 * 24)); + + drawdownPeriods.push({ + start: currentDrawdownStart, + end: lastPoint.date, + duration, + magnitude: drawdownMagnitude + }); + + drawdowns.push(drawdownMagnitude); + maxDrawdownDuration = Math.max(maxDrawdownDuration, duration); + } + + const averageDrawdown = drawdowns.length > 0 ? 
drawdowns.reduce((sum, dd) => sum + dd, 0) / drawdowns.length : 0; + + return { + maxDrawdown, + maxDrawdownDuration, + averageDrawdown, + drawdownPeriods + }; +} + +/** + * Analyze return characteristics + */ +export function analyzeReturns( + returns: Array<{ return: number; date: Date }>, + periodsPerYear: number = 252 +): ReturnAnalysis { + if (returns.length === 0) { + return { + totalReturn: 0, + annualizedReturn: 0, + compoundAnnualGrowthRate: 0, + volatility: 0, + annualizedVolatility: 0, + skewness: 0, + kurtosis: 0, + bestMonth: 0, + worstMonth: 0, + positiveMonths: 0, + negativeMonths: 0 + }; + } + + const returnValues = returns.map(r => r.return); + + // Calculate basic statistics + const totalReturn = returnValues.reduce((product, ret) => product * (1 + ret), 1) - 1; + const averageReturn = returnValues.reduce((sum, ret) => sum + ret, 0) / returnValues.length; + const annualizedReturn = Math.pow(1 + averageReturn, periodsPerYear) - 1; + + // Calculate CAGR + const years = returns.length / periodsPerYear; + const cagr = years > 0 ? Math.pow(1 + totalReturn, 1 / years) - 1 : 0; + + // Calculate volatility + const variance = returnValues.reduce((sum, ret) => sum + Math.pow(ret - averageReturn, 2), 0) / (returnValues.length - 1); + const volatility = Math.sqrt(variance); + const annualizedVolatility = volatility * Math.sqrt(periodsPerYear); + + // Calculate skewness and kurtosis + const skewness = calculateSkewness(returnValues); + const kurtosis = calculateKurtosis(returnValues); + + // Monthly analysis + const monthlyReturns = aggregateMonthlyReturns(returns); + const bestMonth = monthlyReturns.length > 0 ? Math.max(...monthlyReturns) : 0; + const worstMonth = monthlyReturns.length > 0 ? 
Math.min(...monthlyReturns) : 0; + const positiveMonths = monthlyReturns.filter(ret => ret > 0).length; + const negativeMonths = monthlyReturns.filter(ret => ret < 0).length; + + return { + totalReturn, + annualizedReturn, + compoundAnnualGrowthRate: cagr, + volatility, + annualizedVolatility, + skewness, + kurtosis, + bestMonth, + worstMonth, + positiveMonths, + negativeMonths + }; +} + +/** + * Calculate rolling performance metrics + */ +export function calculateRollingMetrics( + returns: number[], + windowSize: number, + metricType: 'sharpe' | 'volatility' | 'return' = 'sharpe' +): number[] { + if (returns.length < windowSize) return []; + + const rollingMetrics: number[] = []; + + for (let i = windowSize - 1; i < returns.length; i++) { + const window = returns.slice(i - windowSize + 1, i + 1); + + switch (metricType) { + case 'sharpe': + rollingMetrics.push(calculateSharpeRatio(window)); + break; + case 'volatility': + rollingMetrics.push(calculateVolatility(window)); + break; + case 'return': + const avgReturn = window.reduce((sum, ret) => sum + ret, 0) / window.length; + rollingMetrics.push(avgReturn); + break; + } + } + + return rollingMetrics; +} + +/** + * Calculate performance attribution + */ +export function strategyPerformanceAttribution( + portfolioReturns: number[], + benchmarkReturns: number[], + sectorWeights: number[], + sectorReturns: number[] +): { + allocationEffect: number; + selectionEffect: number; + interactionEffect: number; + totalActiveReturn: number; +} { + if (portfolioReturns.length !== benchmarkReturns.length) { + throw new Error('Portfolio and benchmark returns must have same length'); + } + + const portfolioReturn = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length; + const benchmarkReturn = benchmarkReturns.reduce((sum, ret) => sum + ret, 0) / benchmarkReturns.length; + + let allocationEffect = 0; + let selectionEffect = 0; + let interactionEffect = 0; + + for (let i = 0; i < sectorWeights.length; i++) 
{ + const portfolioWeight = sectorWeights[i]; + const benchmarkWeight = 1 / sectorWeights.length; // Assuming equal benchmark weights + const sectorReturn = sectorReturns[i]; + + // Allocation effect: (portfolio weight - benchmark weight) * (benchmark sector return - benchmark return) + allocationEffect += (portfolioWeight - benchmarkWeight) * (sectorReturn - benchmarkReturn); + + // Selection effect: benchmark weight * (portfolio sector return - benchmark sector return) + selectionEffect += benchmarkWeight * (sectorReturn - sectorReturn); // Simplified + + // Interaction effect: (portfolio weight - benchmark weight) * (portfolio sector return - benchmark sector return) + interactionEffect += (portfolioWeight - benchmarkWeight) * (sectorReturn - sectorReturn); // Simplified + } + + const totalActiveReturn = portfolioReturn - benchmarkReturn; + + return { + allocationEffect, + selectionEffect, + interactionEffect, + totalActiveReturn + }; +} + +/** + * Calculate Omega ratio + */ +export function omegaRatio(returns: number[], threshold: number = 0): number { + if (returns.length === 0) return 0; + + const gains = returns.filter(ret => ret > threshold).reduce((sum, ret) => sum + (ret - threshold), 0); + const losses = returns.filter(ret => ret < threshold).reduce((sum, ret) => sum + Math.abs(ret - threshold), 0); + + return losses === 0 ? Infinity : gains / losses; +} + +/** + * Calculate gain-to-pain ratio + */ +export function gainToPainRatio(returns: number[]): number { + if (returns.length === 0) return 0; + + const totalGain = returns.reduce((sum, ret) => sum + ret, 0); + const totalPain = returns.filter(ret => ret < 0).reduce((sum, ret) => sum + Math.abs(ret), 0); + + return totalPain === 0 ? (totalGain > 0 ? 
Infinity : 0) : totalGain / totalPain; +} + +/** + * Calculate Martin ratio (modified Sharpe with downside deviation) + */ +export function martinRatio(returns: number[], riskFreeRate: number = 0): number { + if (returns.length === 0) return 0; + + const averageReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const downsideReturns = returns.filter(ret => ret < riskFreeRate); + + if (downsideReturns.length === 0) return Infinity; + + const downsideDeviation = Math.sqrt( + downsideReturns.reduce((sum, ret) => sum + Math.pow(ret - riskFreeRate, 2), 0) / returns.length + ); + + return downsideDeviation === 0 ? Infinity : (averageReturn - riskFreeRate) / downsideDeviation; +} + +/** + * Calculate comprehensive portfolio metrics + */ +export function calculateStrategyMetrics( + equityCurve: Array<{ value: number; date: Date }>, + benchmarkReturns?: number[], + riskFreeRate: number = 0.02 +): PortfolioMetrics { + if (equityCurve.length < 2) { + return { + totalValue: 0, + totalReturn: 0, + totalReturnPercent: 0, + dailyReturn: 0, + dailyReturnPercent: 0, + maxDrawdown: 0, + sharpeRatio: 0, + beta: 0, + alpha: 0, + volatility: 0 + }; + } + + const returns = []; + for (let i = 1; i < equityCurve.length; i++) { + const ret = (equityCurve[i].value - equityCurve[i - 1].value) / equityCurve[i - 1].value; + returns.push(ret); + } + + const totalValue = equityCurve[equityCurve.length - 1].value; + const totalReturn = totalValue - equityCurve[0].value; + const totalReturnPercent = (totalReturn / equityCurve[0].value) * 100; + + const dailyReturn = returns[returns.length - 1]; + const dailyReturnPercent = dailyReturn * 100; + + const maxDrawdown = analyzeDrawdowns(equityCurve).maxDrawdown; + const sharpeRatio = calculateSharpeRatio(returns, riskFreeRate); + const volatility = calculateVolatility(returns); + + let beta = 0; + let alpha = 0; + + if (benchmarkReturns && benchmarkReturns.length === returns.length) { + beta = calculateBeta(returns, 
benchmarkReturns); + alpha = calculateAlpha(returns, benchmarkReturns, riskFreeRate); + } + + return { + totalValue, + totalReturn, + totalReturnPercent, + dailyReturn, + dailyReturnPercent, + maxDrawdown, + sharpeRatio, + beta, + alpha, + volatility + }; +} + +/** + * Calculate Calmar Ratio + */ +export function calmarRatio(returns: number[], equityCurve: Array<{ value: number; date: Date }>, riskFreeRate: number = 0): number { + const maxDrawdown = analyzeDrawdowns(equityCurve).maxDrawdown; + const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + + return maxDrawdown === 0 ? 0 : (avgReturn - riskFreeRate) / maxDrawdown; +} + +/** + * Calculate Sterling Ratio + */ +export function sterlingRatio(returns: number[], equityCurve: Array<{ value: number; date: Date }>, riskFreeRate: number = 0): number { + const averageDrawdown = analyzeDrawdowns(equityCurve).averageDrawdown; + const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + + return averageDrawdown === 0 ? 0 : (avgReturn - riskFreeRate) / averageDrawdown; +} + + +/** + * Calculate Information Ratio + */ +export function informationRatio(portfolioReturns: number[], benchmarkReturns: number[]): number { + if (portfolioReturns.length !== benchmarkReturns.length) { + throw new Error("Portfolio and benchmark returns must have the same length."); + } + + const excessReturns = portfolioReturns.map((portfolioReturn, index) => portfolioReturn - benchmarkReturns[index]); + const trackingError = calculateVolatility(excessReturns); + const avgExcessReturn = excessReturns.reduce((sum, ret) => sum + ret, 0) / excessReturns.length; + + return trackingError === 0 ? 
0 : avgExcessReturn / trackingError; +} + +/** + * Calculate Treynor Ratio + */ +export function treynorRatio(portfolioReturns: number[], marketReturns: number[], riskFreeRate: number): number { + const beta = calculateBeta(portfolioReturns, marketReturns); + const avgPortfolioReturn = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length; + + return beta === 0 ? 0 : (avgPortfolioReturn - riskFreeRate) / beta; +} + +/** + * Calculate Jensen's Alpha (same as Alpha, but included for clarity) + */ +export function jensensAlpha(portfolioReturns: number[], marketReturns: number[], riskFreeRate: number): number { + return calculateAlpha(portfolioReturns, marketReturns, riskFreeRate); +} + +/** + * Calculate Capture Ratio (Up Capture and Down Capture) + */ +export function captureRatio(portfolioReturns: number[], benchmarkReturns: number[]): { upCaptureRatio: number; downCaptureRatio: number } { + let upCapture = 0; + let downCapture = 0; + let upMarketPeriods = 0; + let downMarketPeriods = 0; + + for (let i = 0; i < portfolioReturns.length; i++) { + if (benchmarkReturns[i] > 0) { + upCapture += portfolioReturns[i]; + upMarketPeriods++; + } else if (benchmarkReturns[i] < 0) { + downCapture += portfolioReturns[i]; + downMarketPeriods++; + } + } + + const upCaptureRatio = upMarketPeriods > 0 ? (upCapture / upMarketPeriods) / (benchmarkReturns.filter(r => r > 0).reduce((sum, r) => sum + r, 0) / upMarketPeriods) : 0; + const downCaptureRatio = downMarketPeriods > 0 ? 
(downCapture / downMarketPeriods) / (benchmarkReturns.filter(r => r < 0).reduce((sum, r) => sum + r, 0) / downMarketPeriods) : 0; + + return { upCaptureRatio, downCaptureRatio }; +} + +/** + * Calculate Sortino Ratio + */ +export function sortinoRatio(returns: number[], riskFreeRate: number = 0): number { + const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const downsideReturns = returns.filter(ret => ret < riskFreeRate); + const downsideDeviation = Math.sqrt( + downsideReturns.reduce((sum, ret) => sum + Math.pow(ret - riskFreeRate, 2), 0) / returns.length + ); + + return downsideDeviation === 0 ? 0 : (avgReturn - riskFreeRate) / downsideDeviation; +} + +/** + * Calculate Tail Ratio + */ +export function tailRatio(returns: number[], tailPercent: number = 0.1): number { + const numReturns = returns.length; + const tailSize = Math.floor(numReturns * tailPercent); + + if (tailSize === 0) return 0; + + const sortedReturns = [...returns].sort((a, b) => a - b); + const worstTail = sortedReturns.slice(0, tailSize); + const bestTail = sortedReturns.slice(numReturns - tailSize); + + const avgWorst = worstTail.reduce((sum, ret) => sum + ret, 0) / tailSize; + const avgBest = bestTail.reduce((sum, ret) => sum + ret, 0) / tailSize; + + return avgWorst === 0 ? 
0 : avgBest / Math.abs(avgWorst); +} + +/** + * Calculate Rolling Beta + */ +export function calculateRollingBeta(portfolioReturns: number[], marketReturns: number[], windowSize: number): number[] { + if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < windowSize) return []; + + const rollingBetas: number[] = []; + + for (let i = windowSize; i <= portfolioReturns.length; i++) { + const portfolioWindow = portfolioReturns.slice(i - windowSize, i); + const marketWindow = marketReturns.slice(i - windowSize, i); + rollingBetas.push(calculateBeta(portfolioWindow, marketWindow)); + } + + return rollingBetas; +} + +/** + * Calculate Ulcer Performance Index (UPI) + */ +export function ulcerPerformanceIndex(returns: number[], equityCurve: Array<{ value: number; date: Date }>, riskFreeRate: number = 0): number { + const ui = ulcerIndex(equityCurve); + const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + + return ui === 0 ? 0 : (avgReturn - riskFreeRate) / ui; +} + + +/** + * Calculate Rolling Alpha + */ +export function calculateRollingAlpha(portfolioReturns: number[], marketReturns: number[], riskFreeRate: number, windowSize: number): number[] { + if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < windowSize) return []; + + const rollingAlphas: number[] = []; + + for (let i = windowSize; i <= portfolioReturns.length; i++) { + const portfolioWindow = portfolioReturns.slice(i - windowSize, i); + const marketWindow = marketReturns.slice(i - windowSize, i); + rollingAlphas.push(calculateAlpha(portfolioWindow, marketWindow, riskFreeRate)); + } + + return rollingAlphas; +} + +/** + * Calculate Time Weighted Rate of Return (TWRR) + */ +export function timeWeightedRateOfReturn(cashFlows: Array<{ amount: number; date: Date; value: number }>): number { + let totalReturn = 1; + let previousValue = cashFlows[0].value; + + for (let i = 1; i < cashFlows.length; i++) { + const current = cashFlows[i]; + 
const periodReturn = (current.value - previousValue - current.amount) / (previousValue + current.amount); + totalReturn *= (1 + periodReturn); + previousValue = current.value; + } + + return totalReturn - 1; +} + +/** + * Calculate Money Weighted Rate of Return (MWRR) - Approximation using IRR + */ +export function moneyWeightedRateOfReturn(cashFlows: Array<{ amount: number; date: Date; value: number }>): number { + // Approximate MWRR using Internal Rate of Return (IRR) + // This requires a numerical method or library for accurate IRR calculation + // This is a simplified example and may not be accurate for all cases + + let totalCashFlow = 0; + let totalWeightedCashFlow = 0; + const startDate = cashFlows[0].date.getTime(); + + for (const cf of cashFlows) { + const timeDiff = (cf.date.getTime() - startDate) / (1000 * 60 * 60 * 24 * 365); // Years + totalCashFlow += cf.amount; + totalWeightedCashFlow += cf.amount * timeDiff; + } + + // Simplified approximation: MWRR β‰ˆ totalCashFlow / totalWeightedCashFlow - 1 + return totalCashFlow / totalWeightedCashFlow - 1; +} + +// Helper functions + +function calculateSharpeRatio(returns: number[], riskFreeRate: number = 0): number { + if (returns.length < 2) return 0; + + const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - avgReturn, 2), 0) / (returns.length - 1); + const stdDev = Math.sqrt(variance); + + return stdDev === 0 ? 
0 : (avgReturn - riskFreeRate) / stdDev; +} + +function calculateVolatility(returns: number[]): number { + if (returns.length < 2) return 0; + + const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1); + + return Math.sqrt(variance); +} + +function calculateBeta(portfolioReturns: number[], marketReturns: number[]): number { + if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < 2) return 0; + + const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length; + const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length; + + let covariance = 0; + let marketVariance = 0; + + for (let i = 0; i < portfolioReturns.length; i++) { + const portfolioDiff = portfolioReturns[i] - portfolioMean; + const marketDiff = marketReturns[i] - marketMean; + + covariance += portfolioDiff * marketDiff; + marketVariance += marketDiff * marketDiff; + } + + covariance /= (portfolioReturns.length - 1); + marketVariance /= (marketReturns.length - 1); + + return marketVariance === 0 ? 
0 : covariance / marketVariance; +} + +function calculateAlpha( + portfolioReturns: number[], + marketReturns: number[], + riskFreeRate: number +): number { + const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length; + const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length; + const beta = calculateBeta(portfolioReturns, marketReturns); + + return portfolioMean - (riskFreeRate + beta * (marketMean - riskFreeRate)); +} + +function calculateSkewness(returns: number[]): number { + if (returns.length < 3) return 0; + + const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length; + const stdDev = Math.sqrt(variance); + + if (stdDev === 0) return 0; + + const skew = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 3), 0) / returns.length; + + return skew; +} + +function calculateKurtosis(returns: number[]): number { + if (returns.length < 4) return 0; + + const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length; + const stdDev = Math.sqrt(variance); + + if (stdDev === 0) return 0; + + const kurt = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 4), 0) / returns.length; + + return kurt - 3; // Excess kurtosis +} + +function aggregateMonthlyReturns(returns: Array<{ return: number; date: Date }>): number[] { + const monthlyReturns: { [key: string]: number } = {}; + + for (const ret of returns) { + const monthKey = `${ret.date.getFullYear()}-${ret.date.getMonth()}`; + if (!monthlyReturns[monthKey]) { + monthlyReturns[monthKey] = 1; + } + monthlyReturns[monthKey] *= (1 + ret.return); + } + + return Object.values(monthlyReturns).map(cumReturn => cumReturn - 1); +} diff --git a/libs/utils/src/calculations/portfolio-analytics.ts 
b/libs/utils/src/calculations/portfolio-analytics.ts index a7b045c..1880259 100644 --- a/libs/utils/src/calculations/portfolio-analytics.ts +++ b/libs/utils/src/calculations/portfolio-analytics.ts @@ -1,576 +1,576 @@ -/** - * Portfolio Analytics - * Advanced portfolio analysis and optimization tools - */ - -import { OHLCVData, PriceData } from './index'; - -export interface PortfolioPosition { - symbol: string; - shares: number; - price: number; - value: number; - weight: number; -} - -export interface PortfolioAnalysis { - totalValue: number; - totalReturn: number; - volatility: number; - sharpeRatio: number; - maxDrawdown: number; - var95: number; - beta: number; - alpha: number; - treynorRatio: number; - informationRatio: number; - trackingError: number; -} - -export interface AssetAllocation { - symbol: string; - targetWeight: number; - currentWeight: number; - difference: number; - rebalanceAmount: number; -} - -export interface PortfolioOptimizationResult { - weights: number[]; - expectedReturn: number; - volatility: number; - sharpeRatio: number; - symbols: string[]; -} - -/** - * Calculate portfolio value and weights - */ -export function calculatePortfolioMetrics(positions: PortfolioPosition[]): { - totalValue: number; - weights: number[]; - concentrationRisk: number; -} { - const totalValue = positions.reduce((sum, pos) => sum + pos.value, 0); - const weights = positions.map(pos => pos.value / totalValue); - - // Calculate Herfindahl-Hirschman Index for concentration risk - const concentrationRisk = weights.reduce((sum, weight) => sum + weight * weight, 0); - - return { - totalValue, - weights, - concentrationRisk - }; -} - -/** - * Calculate portfolio returns from position returns - */ -export function calculatePortfolioReturns( - assetReturns: number[][], - weights: number[] -): number[] { - if (assetReturns.length === 0 || weights.length !== assetReturns[0].length) { - return []; - } - - const portfolioReturns: number[] = []; - - for (let i = 0; i < 
assetReturns.length; i++) { - let portfolioReturn = 0; - for (let j = 0; j < weights.length; j++) { - portfolioReturn += weights[j] * assetReturns[i][j]; - } - portfolioReturns.push(portfolioReturn); - } - - return portfolioReturns; -} - -/** - * Mean-Variance Optimization (Markowitz) - */ -export function markowitzOptimization( - expectedReturns: number[], - covarianceMatrix: number[][], - riskFreeRate: number = 0.02, - riskAversion: number = 1 -): PortfolioOptimizationResult { - const n = expectedReturns.length; - - // Simplified optimization using equal weights as baseline - // In production, use proper quadratic programming solver - const weights = new Array(n).fill(1 / n); - - const expectedReturn = weights.reduce((sum, weight, i) => sum + weight * expectedReturns[i], 0); - - // Calculate portfolio variance - let portfolioVariance = 0; - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - portfolioVariance += weights[i] * weights[j] * covarianceMatrix[i][j]; - } - } - - const volatility = Math.sqrt(portfolioVariance); - const sharpeRatio = volatility > 0 ? 
(expectedReturn - riskFreeRate) / volatility : 0; - - return { - weights, - expectedReturn, - volatility, - sharpeRatio, - symbols: [] // Would be filled with actual symbols - }; -} - -/** - * Black-Litterman Model - */ -export function blackLittermanOptimization( - marketCaps: number[], - covarianceMatrix: number[][], - views: Array<{ assets: number[]; expectedReturn: number; confidence: number }>, - riskAversion: number = 3, - riskFreeRate: number = 0.02 -): PortfolioOptimizationResult { - const n = marketCaps.length; - - // Calculate market weights - const totalMarketCap = marketCaps.reduce((sum, cap) => sum + cap, 0); - const marketWeights = marketCaps.map(cap => cap / totalMarketCap); - - // Implied equilibrium returns - const equilibriumReturns: number[] = []; - for (let i = 0; i < n; i++) { - let equilibriumReturn = 0; - for (let j = 0; j < n; j++) { - equilibriumReturn += riskAversion * covarianceMatrix[i][j] * marketWeights[j]; - } - equilibriumReturns.push(equilibriumReturn); - } - - // Simplified BL implementation - in production use proper matrix operations - const weights = [...marketWeights]; // Start with market weights - - const expectedReturn = weights.reduce((sum, weight, i) => sum + weight * equilibriumReturns[i], 0); - - let portfolioVariance = 0; - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - portfolioVariance += weights[i] * weights[j] * covarianceMatrix[i][j]; - } - } - - const volatility = Math.sqrt(portfolioVariance); - const sharpeRatio = volatility > 0 ? 
(expectedReturn - riskFreeRate) / volatility : 0; - - return { - weights, - expectedReturn, - volatility, - sharpeRatio, - symbols: [] - }; -} - -/** - * Risk Parity Portfolio - */ -export function riskParityOptimization(covarianceMatrix: number[][]): PortfolioOptimizationResult { - const n = covarianceMatrix.length; - - // Start with equal weights - let weights = new Array(n).fill(1 / n); - - // Iterative optimization for equal risk contribution - const maxIterations = 100; - const tolerance = 1e-8; - - for (let iter = 0; iter < maxIterations; iter++) { - const riskContributions = calculateRiskContributions(weights, covarianceMatrix); - const totalRisk = Math.sqrt(calculatePortfolioVariance(weights, covarianceMatrix)); - const targetRiskContribution = totalRisk / n; - - let converged = true; - const newWeights = [...weights]; - - for (let i = 0; i < n; i++) { - const diff = riskContributions[i] - targetRiskContribution; - if (Math.abs(diff) > tolerance) { - converged = false; - // Simple adjustment - in production use proper optimization - newWeights[i] *= (1 - diff / totalRisk * 0.1); - } - } - - // Normalize weights - const sum = newWeights.reduce((s, w) => s + w, 0); - weights = newWeights.map(w => w / sum); - - if (converged) break; - } - - const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix); - const volatility = Math.sqrt(portfolioVariance); - - return { - weights, - expectedReturn: 0, // Not calculated for risk parity - volatility, - sharpeRatio: 0, - symbols: [] - }; -} - -/** - * Calculate risk contributions for each asset - */ -export function calculateRiskContributions( - weights: number[], - covarianceMatrix: number[][] -): number[] { - const n = weights.length; - const riskContributions: number[] = []; - - const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix); - const portfolioVolatility = Math.sqrt(portfolioVariance); - - for (let i = 0; i < n; i++) { - let marginalContribution = 0; - for (let j = 
0; j < n; j++) { - marginalContribution += weights[j] * covarianceMatrix[i][j]; - } - - const riskContribution = (weights[i] * marginalContribution) / portfolioVolatility; - riskContributions.push(riskContribution); - } - - return riskContributions; -} - -/** - * Calculate portfolio variance - */ -export function calculatePortfolioVariance( - weights: number[], - covarianceMatrix: number[][] -): number { - const n = weights.length; - let variance = 0; - - for (let i = 0; i < n; i++) { - for (let j = 0; j < n; j++) { - variance += weights[i] * weights[j] * covarianceMatrix[i][j]; - } - } - - return variance; -} - -/** - * Portfolio rebalancing analysis - */ -export function calculateRebalancing( - currentPositions: PortfolioPosition[], - targetWeights: number[], - totalValue: number -): AssetAllocation[] { - if (currentPositions.length !== targetWeights.length) { - throw new Error('Number of positions must match number of target weights'); - } - - return currentPositions.map((position, index) => { - const currentWeight = position.value / totalValue; - const targetWeight = targetWeights[index]; - const difference = targetWeight - currentWeight; - const rebalanceAmount = difference * totalValue; - - return { - symbol: position.symbol, - targetWeight, - currentWeight, - difference, - rebalanceAmount - }; - }); -} - -/** - * Factor model analysis (Fama-French) - */ -export function famaFrenchAnalysis( - portfolioReturns: number[], - marketReturns: number[], - smbReturns: number[], // Small minus Big - hmlReturns: number[], // High minus Low - riskFreeRate: number = 0.02 -): { - alpha: number; - marketBeta: number; - sizeBeta: number; - valueBeta: number; - rSquared: number; -} { - const n = portfolioReturns.length; - - // Excess returns - const excessPortfolioReturns = portfolioReturns.map(r => r - riskFreeRate); - const excessMarketReturns = marketReturns.map(r => r - riskFreeRate); - - // Simple linear regression (in production, use proper multiple regression) - const 
meanExcessPortfolio = excessPortfolioReturns.reduce((sum, r) => sum + r, 0) / n; - const meanExcessMarket = excessMarketReturns.reduce((sum, r) => sum + r, 0) / n; - const meanSMB = smbReturns.reduce((sum, r) => sum + r, 0) / n; - const meanHML = hmlReturns.reduce((sum, r) => sum + r, 0) / n; - - // Calculate market beta - let covariance = 0; - let marketVariance = 0; - - for (let i = 0; i < n; i++) { - const portfolioDiff = excessPortfolioReturns[i] - meanExcessPortfolio; - const marketDiff = excessMarketReturns[i] - meanExcessMarket; - - covariance += portfolioDiff * marketDiff; - marketVariance += marketDiff * marketDiff; - } - - const marketBeta = marketVariance > 0 ? covariance / marketVariance : 0; - const alpha = meanExcessPortfolio - marketBeta * meanExcessMarket; - - return { - alpha, - marketBeta, - sizeBeta: 0, // Simplified - would need proper regression - valueBeta: 0, // Simplified - would need proper regression - rSquared: 0 // Simplified - would need proper regression - }; -} - -/** - * Portfolio performance attribution - */ -export function performanceAttribution( - portfolioReturns: number[], - benchmarkReturns: number[], - sectorWeights: number[][], - sectorReturns: number[][] -): { - totalActiveReturn: number; - allocationEffect: number; - selectionEffect: number; - interactionEffect: number; -} { - const n = portfolioReturns.length; - - const portfolioReturn = portfolioReturns.reduce((sum, r) => sum + r, 0) / n; - const benchmarkReturn = benchmarkReturns.reduce((sum, r) => sum + r, 0) / n; - const totalActiveReturn = portfolioReturn - benchmarkReturn; - - // Simplified attribution analysis - let allocationEffect = 0; - let selectionEffect = 0; - let interactionEffect = 0; - - // This would require proper implementation with sector-level analysis - // For now, return the total active return distributed equally - allocationEffect = totalActiveReturn * 0.4; - selectionEffect = totalActiveReturn * 0.4; - interactionEffect = totalActiveReturn * 0.2; 
- - return { - totalActiveReturn, - allocationEffect, - selectionEffect, - interactionEffect - }; -} - -/** - * Calculate Efficient Frontier points - */ -export function calculateEfficientFrontier( - returns: number[][], // Array of return series for each asset - symbols: string[], - riskFreeRate: number = 0.02, - numPoints: number = 50 -): Array<{ - weights: number[]; - expectedReturn: number; - volatility: number; - sharpeRatio: number; -}> { - if (returns.length !== symbols.length || returns.length < 2) return []; - - const n = returns.length; - const results: Array<{ weights: number[]; expectedReturn: number; volatility: number; sharpeRatio: number; }> = []; - - // Calculate expected returns and covariance matrix - const expectedReturns = returns.map(assetReturns => - assetReturns.reduce((sum, ret) => sum + ret, 0) / assetReturns.length - ); - - const covarianceMatrix = calculateCovarianceMatrix(returns); - - // Generate target returns from min to max expected return - const minReturn = Math.min(...expectedReturns); - const maxReturn = Math.max(...expectedReturns); - const returnStep = (maxReturn - minReturn) / (numPoints - 1); - - for (let i = 0; i < numPoints; i++) { - const targetReturn = minReturn + i * returnStep; - - // Find minimum variance portfolio for target return using quadratic programming (simplified) - const weights = findMinimumVarianceWeights(expectedReturns, covarianceMatrix, targetReturn); - - if (weights && weights.length === n) { - const portfolioReturn = weights.reduce((sum, w, j) => sum + w * expectedReturns[j], 0); - const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix); - const portfolioVolatility = Math.sqrt(portfolioVariance); - const sharpeRatio = portfolioVolatility > 0 ? 
(portfolioReturn - riskFreeRate) / portfolioVolatility : 0; - - results.push({ - weights, - expectedReturn: portfolioReturn, - volatility: portfolioVolatility, - sharpeRatio - }); - } - } - - return results.sort((a, b) => a.volatility - b.volatility); -} - -/** - * Find Minimum Variance Portfolio - */ -export function findMinimumVariancePortfolio( - returns: number[][], - symbols: string[] -): PortfolioOptimizationResult | null { - if (returns.length !== symbols.length || returns.length < 2) return null; - - const covarianceMatrix = calculateCovarianceMatrix(returns); - const n = returns.length; - - // For minimum variance portfolio: w = (Ξ£^-1 * 1) / (1' * Ξ£^-1 * 1) - // Simplified implementation using equal weights as starting point - const weights = new Array(n).fill(1 / n); - - // Iterative optimization (simplified) - for (let iter = 0; iter < 100; iter++) { - const gradient = calculateVarianceGradient(weights, covarianceMatrix); - const stepSize = 0.01; - - // Update weights - for (let i = 0; i < n; i++) { - weights[i] -= stepSize * gradient[i]; - } - - // Normalize weights to sum to 1 - const weightSum = weights.reduce((sum, w) => sum + w, 0); - for (let i = 0; i < n; i++) { - weights[i] = Math.max(0, weights[i] / weightSum); - } - } - - const expectedReturns = returns.map(assetReturns => - assetReturns.reduce((sum, ret) => sum + ret, 0) / assetReturns.length - ); - - const portfolioReturn = weights.reduce((sum, w, i) => sum + w * expectedReturns[i], 0); - const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix); - const portfolioVolatility = Math.sqrt(portfolioVariance); - const sharpeRatio = portfolioVolatility > 0 ? 
portfolioReturn / portfolioVolatility : 0; - - return { - weights, - expectedReturn: portfolioReturn, - volatility: portfolioVolatility, - sharpeRatio, - symbols - }; -} - -// Helper functions for portfolio optimization - -function calculateCovarianceMatrix(returns: number[][]): number[][] { - const n = returns.length; - const matrix: number[][] = []; - - for (let i = 0; i < n; i++) { - matrix[i] = []; - for (let j = 0; j < n; j++) { - matrix[i][j] = calculateCovariance(returns[i], returns[j]); - } - } - - return matrix; -} - -function calculateCovariance(x: number[], y: number[]): number { - if (x.length !== y.length || x.length < 2) return 0; - - const n = x.length; - const meanX = x.reduce((sum, val) => sum + val, 0) / n; - const meanY = y.reduce((sum, val) => sum + val, 0) / n; - - return x.reduce((sum, val, i) => sum + (val - meanX) * (y[i] - meanY), 0) / (n - 1); -} - -// calculatePortfolioVariance is already exported above - -function calculateVarianceGradient(weights: number[], covarianceMatrix: number[][]): number[] { - const n = weights.length; - const gradient: number[] = []; - - for (let i = 0; i < n; i++) { - let grad = 0; - for (let j = 0; j < n; j++) { - grad += 2 * weights[j] * covarianceMatrix[i][j]; - } - gradient[i] = grad; - } - - return gradient; -} - -function findMinimumVarianceWeights( - expectedReturns: number[], - covarianceMatrix: number[][], - targetReturn: number -): number[] | null { - const n = expectedReturns.length; - - // Simplified implementation - in practice would use quadratic programming solver - // Start with equal weights and adjust - const weights = new Array(n).fill(1 / n); - - // Iterative adjustment to meet target return constraint - for (let iter = 0; iter < 50; iter++) { - const currentReturn = weights.reduce((sum, w, i) => sum + w * expectedReturns[i], 0); - const returnDiff = targetReturn - currentReturn; - - if (Math.abs(returnDiff) < 0.001) break; - - // Adjust weights proportionally to expected returns - const 
totalExpectedReturn = expectedReturns.reduce((sum, r) => sum + Math.abs(r), 0); - - for (let i = 0; i < n; i++) { - const adjustment = (returnDiff * Math.abs(expectedReturns[i])) / totalExpectedReturn; - weights[i] = Math.max(0, weights[i] + adjustment * 0.1); - } - - // Normalize weights - const weightSum = weights.reduce((sum, w) => sum + w, 0); - if (weightSum > 0) { - for (let i = 0; i < n; i++) { - weights[i] /= weightSum; - } - } - } - - return weights; -} +/** + * Portfolio Analytics + * Advanced portfolio analysis and optimization tools + */ + +import { OHLCVData, PriceData } from './index'; + +export interface PortfolioPosition { + symbol: string; + shares: number; + price: number; + value: number; + weight: number; +} + +export interface PortfolioAnalysis { + totalValue: number; + totalReturn: number; + volatility: number; + sharpeRatio: number; + maxDrawdown: number; + var95: number; + beta: number; + alpha: number; + treynorRatio: number; + informationRatio: number; + trackingError: number; +} + +export interface AssetAllocation { + symbol: string; + targetWeight: number; + currentWeight: number; + difference: number; + rebalanceAmount: number; +} + +export interface PortfolioOptimizationResult { + weights: number[]; + expectedReturn: number; + volatility: number; + sharpeRatio: number; + symbols: string[]; +} + +/** + * Calculate portfolio value and weights + */ +export function calculatePortfolioMetrics(positions: PortfolioPosition[]): { + totalValue: number; + weights: number[]; + concentrationRisk: number; +} { + const totalValue = positions.reduce((sum, pos) => sum + pos.value, 0); + const weights = positions.map(pos => pos.value / totalValue); + + // Calculate Herfindahl-Hirschman Index for concentration risk + const concentrationRisk = weights.reduce((sum, weight) => sum + weight * weight, 0); + + return { + totalValue, + weights, + concentrationRisk + }; +} + +/** + * Calculate portfolio returns from position returns + */ +export function 
calculatePortfolioReturns( + assetReturns: number[][], + weights: number[] +): number[] { + if (assetReturns.length === 0 || weights.length !== assetReturns[0].length) { + return []; + } + + const portfolioReturns: number[] = []; + + for (let i = 0; i < assetReturns.length; i++) { + let portfolioReturn = 0; + for (let j = 0; j < weights.length; j++) { + portfolioReturn += weights[j] * assetReturns[i][j]; + } + portfolioReturns.push(portfolioReturn); + } + + return portfolioReturns; +} + +/** + * Mean-Variance Optimization (Markowitz) + */ +export function markowitzOptimization( + expectedReturns: number[], + covarianceMatrix: number[][], + riskFreeRate: number = 0.02, + riskAversion: number = 1 +): PortfolioOptimizationResult { + const n = expectedReturns.length; + + // Simplified optimization using equal weights as baseline + // In production, use proper quadratic programming solver + const weights = new Array(n).fill(1 / n); + + const expectedReturn = weights.reduce((sum, weight, i) => sum + weight * expectedReturns[i], 0); + + // Calculate portfolio variance + let portfolioVariance = 0; + for (let i = 0; i < n; i++) { + for (let j = 0; j < n; j++) { + portfolioVariance += weights[i] * weights[j] * covarianceMatrix[i][j]; + } + } + + const volatility = Math.sqrt(portfolioVariance); + const sharpeRatio = volatility > 0 ? 
(expectedReturn - riskFreeRate) / volatility : 0; + + return { + weights, + expectedReturn, + volatility, + sharpeRatio, + symbols: [] // Would be filled with actual symbols + }; +} + +/** + * Black-Litterman Model + */ +export function blackLittermanOptimization( + marketCaps: number[], + covarianceMatrix: number[][], + views: Array<{ assets: number[]; expectedReturn: number; confidence: number }>, + riskAversion: number = 3, + riskFreeRate: number = 0.02 +): PortfolioOptimizationResult { + const n = marketCaps.length; + + // Calculate market weights + const totalMarketCap = marketCaps.reduce((sum, cap) => sum + cap, 0); + const marketWeights = marketCaps.map(cap => cap / totalMarketCap); + + // Implied equilibrium returns + const equilibriumReturns: number[] = []; + for (let i = 0; i < n; i++) { + let equilibriumReturn = 0; + for (let j = 0; j < n; j++) { + equilibriumReturn += riskAversion * covarianceMatrix[i][j] * marketWeights[j]; + } + equilibriumReturns.push(equilibriumReturn); + } + + // Simplified BL implementation - in production use proper matrix operations + const weights = [...marketWeights]; // Start with market weights + + const expectedReturn = weights.reduce((sum, weight, i) => sum + weight * equilibriumReturns[i], 0); + + let portfolioVariance = 0; + for (let i = 0; i < n; i++) { + for (let j = 0; j < n; j++) { + portfolioVariance += weights[i] * weights[j] * covarianceMatrix[i][j]; + } + } + + const volatility = Math.sqrt(portfolioVariance); + const sharpeRatio = volatility > 0 ? 
(expectedReturn - riskFreeRate) / volatility : 0; + + return { + weights, + expectedReturn, + volatility, + sharpeRatio, + symbols: [] + }; +} + +/** + * Risk Parity Portfolio + */ +export function riskParityOptimization(covarianceMatrix: number[][]): PortfolioOptimizationResult { + const n = covarianceMatrix.length; + + // Start with equal weights + let weights = new Array(n).fill(1 / n); + + // Iterative optimization for equal risk contribution + const maxIterations = 100; + const tolerance = 1e-8; + + for (let iter = 0; iter < maxIterations; iter++) { + const riskContributions = calculateRiskContributions(weights, covarianceMatrix); + const totalRisk = Math.sqrt(calculatePortfolioVariance(weights, covarianceMatrix)); + const targetRiskContribution = totalRisk / n; + + let converged = true; + const newWeights = [...weights]; + + for (let i = 0; i < n; i++) { + const diff = riskContributions[i] - targetRiskContribution; + if (Math.abs(diff) > tolerance) { + converged = false; + // Simple adjustment - in production use proper optimization + newWeights[i] *= (1 - diff / totalRisk * 0.1); + } + } + + // Normalize weights + const sum = newWeights.reduce((s, w) => s + w, 0); + weights = newWeights.map(w => w / sum); + + if (converged) break; + } + + const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix); + const volatility = Math.sqrt(portfolioVariance); + + return { + weights, + expectedReturn: 0, // Not calculated for risk parity + volatility, + sharpeRatio: 0, + symbols: [] + }; +} + +/** + * Calculate risk contributions for each asset + */ +export function calculateRiskContributions( + weights: number[], + covarianceMatrix: number[][] +): number[] { + const n = weights.length; + const riskContributions: number[] = []; + + const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix); + const portfolioVolatility = Math.sqrt(portfolioVariance); + + for (let i = 0; i < n; i++) { + let marginalContribution = 0; + for (let j = 
0; j < n; j++) { + marginalContribution += weights[j] * covarianceMatrix[i][j]; + } + + const riskContribution = (weights[i] * marginalContribution) / portfolioVolatility; + riskContributions.push(riskContribution); + } + + return riskContributions; +} + +/** + * Calculate portfolio variance + */ +export function calculatePortfolioVariance( + weights: number[], + covarianceMatrix: number[][] +): number { + const n = weights.length; + let variance = 0; + + for (let i = 0; i < n; i++) { + for (let j = 0; j < n; j++) { + variance += weights[i] * weights[j] * covarianceMatrix[i][j]; + } + } + + return variance; +} + +/** + * Portfolio rebalancing analysis + */ +export function calculateRebalancing( + currentPositions: PortfolioPosition[], + targetWeights: number[], + totalValue: number +): AssetAllocation[] { + if (currentPositions.length !== targetWeights.length) { + throw new Error('Number of positions must match number of target weights'); + } + + return currentPositions.map((position, index) => { + const currentWeight = position.value / totalValue; + const targetWeight = targetWeights[index]; + const difference = targetWeight - currentWeight; + const rebalanceAmount = difference * totalValue; + + return { + symbol: position.symbol, + targetWeight, + currentWeight, + difference, + rebalanceAmount + }; + }); +} + +/** + * Factor model analysis (Fama-French) + */ +export function famaFrenchAnalysis( + portfolioReturns: number[], + marketReturns: number[], + smbReturns: number[], // Small minus Big + hmlReturns: number[], // High minus Low + riskFreeRate: number = 0.02 +): { + alpha: number; + marketBeta: number; + sizeBeta: number; + valueBeta: number; + rSquared: number; +} { + const n = portfolioReturns.length; + + // Excess returns + const excessPortfolioReturns = portfolioReturns.map(r => r - riskFreeRate); + const excessMarketReturns = marketReturns.map(r => r - riskFreeRate); + + // Simple linear regression (in production, use proper multiple regression) + const 
meanExcessPortfolio = excessPortfolioReturns.reduce((sum, r) => sum + r, 0) / n; + const meanExcessMarket = excessMarketReturns.reduce((sum, r) => sum + r, 0) / n; + const meanSMB = smbReturns.reduce((sum, r) => sum + r, 0) / n; + const meanHML = hmlReturns.reduce((sum, r) => sum + r, 0) / n; + + // Calculate market beta + let covariance = 0; + let marketVariance = 0; + + for (let i = 0; i < n; i++) { + const portfolioDiff = excessPortfolioReturns[i] - meanExcessPortfolio; + const marketDiff = excessMarketReturns[i] - meanExcessMarket; + + covariance += portfolioDiff * marketDiff; + marketVariance += marketDiff * marketDiff; + } + + const marketBeta = marketVariance > 0 ? covariance / marketVariance : 0; + const alpha = meanExcessPortfolio - marketBeta * meanExcessMarket; + + return { + alpha, + marketBeta, + sizeBeta: 0, // Simplified - would need proper regression + valueBeta: 0, // Simplified - would need proper regression + rSquared: 0 // Simplified - would need proper regression + }; +} + +/** + * Portfolio performance attribution + */ +export function performanceAttribution( + portfolioReturns: number[], + benchmarkReturns: number[], + sectorWeights: number[][], + sectorReturns: number[][] +): { + totalActiveReturn: number; + allocationEffect: number; + selectionEffect: number; + interactionEffect: number; +} { + const n = portfolioReturns.length; + + const portfolioReturn = portfolioReturns.reduce((sum, r) => sum + r, 0) / n; + const benchmarkReturn = benchmarkReturns.reduce((sum, r) => sum + r, 0) / n; + const totalActiveReturn = portfolioReturn - benchmarkReturn; + + // Simplified attribution analysis + let allocationEffect = 0; + let selectionEffect = 0; + let interactionEffect = 0; + + // This would require proper implementation with sector-level analysis + // For now, return the total active return distributed equally + allocationEffect = totalActiveReturn * 0.4; + selectionEffect = totalActiveReturn * 0.4; + interactionEffect = totalActiveReturn * 0.2; 
+ + return { + totalActiveReturn, + allocationEffect, + selectionEffect, + interactionEffect + }; +} + +/** + * Calculate Efficient Frontier points + */ +export function calculateEfficientFrontier( + returns: number[][], // Array of return series for each asset + symbols: string[], + riskFreeRate: number = 0.02, + numPoints: number = 50 +): Array<{ + weights: number[]; + expectedReturn: number; + volatility: number; + sharpeRatio: number; +}> { + if (returns.length !== symbols.length || returns.length < 2) return []; + + const n = returns.length; + const results: Array<{ weights: number[]; expectedReturn: number; volatility: number; sharpeRatio: number; }> = []; + + // Calculate expected returns and covariance matrix + const expectedReturns = returns.map(assetReturns => + assetReturns.reduce((sum, ret) => sum + ret, 0) / assetReturns.length + ); + + const covarianceMatrix = calculateCovarianceMatrix(returns); + + // Generate target returns from min to max expected return + const minReturn = Math.min(...expectedReturns); + const maxReturn = Math.max(...expectedReturns); + const returnStep = (maxReturn - minReturn) / (numPoints - 1); + + for (let i = 0; i < numPoints; i++) { + const targetReturn = minReturn + i * returnStep; + + // Find minimum variance portfolio for target return using quadratic programming (simplified) + const weights = findMinimumVarianceWeights(expectedReturns, covarianceMatrix, targetReturn); + + if (weights && weights.length === n) { + const portfolioReturn = weights.reduce((sum, w, j) => sum + w * expectedReturns[j], 0); + const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix); + const portfolioVolatility = Math.sqrt(portfolioVariance); + const sharpeRatio = portfolioVolatility > 0 ? 
(portfolioReturn - riskFreeRate) / portfolioVolatility : 0; + + results.push({ + weights, + expectedReturn: portfolioReturn, + volatility: portfolioVolatility, + sharpeRatio + }); + } + } + + return results.sort((a, b) => a.volatility - b.volatility); +} + +/** + * Find Minimum Variance Portfolio + */ +export function findMinimumVariancePortfolio( + returns: number[][], + symbols: string[] +): PortfolioOptimizationResult | null { + if (returns.length !== symbols.length || returns.length < 2) return null; + + const covarianceMatrix = calculateCovarianceMatrix(returns); + const n = returns.length; + + // For minimum variance portfolio: w = (Ξ£^-1 * 1) / (1' * Ξ£^-1 * 1) + // Simplified implementation using equal weights as starting point + const weights = new Array(n).fill(1 / n); + + // Iterative optimization (simplified) + for (let iter = 0; iter < 100; iter++) { + const gradient = calculateVarianceGradient(weights, covarianceMatrix); + const stepSize = 0.01; + + // Update weights + for (let i = 0; i < n; i++) { + weights[i] -= stepSize * gradient[i]; + } + + // Normalize weights to sum to 1 + const weightSum = weights.reduce((sum, w) => sum + w, 0); + for (let i = 0; i < n; i++) { + weights[i] = Math.max(0, weights[i] / weightSum); + } + } + + const expectedReturns = returns.map(assetReturns => + assetReturns.reduce((sum, ret) => sum + ret, 0) / assetReturns.length + ); + + const portfolioReturn = weights.reduce((sum, w, i) => sum + w * expectedReturns[i], 0); + const portfolioVariance = calculatePortfolioVariance(weights, covarianceMatrix); + const portfolioVolatility = Math.sqrt(portfolioVariance); + const sharpeRatio = portfolioVolatility > 0 ? 
portfolioReturn / portfolioVolatility : 0; + + return { + weights, + expectedReturn: portfolioReturn, + volatility: portfolioVolatility, + sharpeRatio, + symbols + }; +} + +// Helper functions for portfolio optimization + +function calculateCovarianceMatrix(returns: number[][]): number[][] { + const n = returns.length; + const matrix: number[][] = []; + + for (let i = 0; i < n; i++) { + matrix[i] = []; + for (let j = 0; j < n; j++) { + matrix[i][j] = calculateCovariance(returns[i], returns[j]); + } + } + + return matrix; +} + +function calculateCovariance(x: number[], y: number[]): number { + if (x.length !== y.length || x.length < 2) return 0; + + const n = x.length; + const meanX = x.reduce((sum, val) => sum + val, 0) / n; + const meanY = y.reduce((sum, val) => sum + val, 0) / n; + + return x.reduce((sum, val, i) => sum + (val - meanX) * (y[i] - meanY), 0) / (n - 1); +} + +// calculatePortfolioVariance is already exported above + +function calculateVarianceGradient(weights: number[], covarianceMatrix: number[][]): number[] { + const n = weights.length; + const gradient: number[] = []; + + for (let i = 0; i < n; i++) { + let grad = 0; + for (let j = 0; j < n; j++) { + grad += 2 * weights[j] * covarianceMatrix[i][j]; + } + gradient[i] = grad; + } + + return gradient; +} + +function findMinimumVarianceWeights( + expectedReturns: number[], + covarianceMatrix: number[][], + targetReturn: number +): number[] | null { + const n = expectedReturns.length; + + // Simplified implementation - in practice would use quadratic programming solver + // Start with equal weights and adjust + const weights = new Array(n).fill(1 / n); + + // Iterative adjustment to meet target return constraint + for (let iter = 0; iter < 50; iter++) { + const currentReturn = weights.reduce((sum, w, i) => sum + w * expectedReturns[i], 0); + const returnDiff = targetReturn - currentReturn; + + if (Math.abs(returnDiff) < 0.001) break; + + // Adjust weights proportionally to expected returns + const 
totalExpectedReturn = expectedReturns.reduce((sum, r) => sum + Math.abs(r), 0); + + for (let i = 0; i < n; i++) { + const adjustment = (returnDiff * Math.abs(expectedReturns[i])) / totalExpectedReturn; + weights[i] = Math.max(0, weights[i] + adjustment * 0.1); + } + + // Normalize weights + const weightSum = weights.reduce((sum, w) => sum + w, 0); + if (weightSum > 0) { + for (let i = 0; i < n; i++) { + weights[i] /= weightSum; + } + } + } + + return weights; +} diff --git a/libs/utils/src/calculations/position-sizing.ts b/libs/utils/src/calculations/position-sizing.ts index e82775f..32a5445 100644 --- a/libs/utils/src/calculations/position-sizing.ts +++ b/libs/utils/src/calculations/position-sizing.ts @@ -1,524 +1,524 @@ -/** - * Position Sizing Calculations - * Risk-based position sizing methods for trading strategies - */ - -export interface PositionSizeParams { - accountSize: number; - riskPercentage: number; - entryPrice: number; - stopLoss: number; - leverage?: number; -} - -export interface KellyParams { - winRate: number; - averageWin: number; - averageLoss: number; -} - -export interface VolatilityParams { - price: number; - volatility: number; - targetVolatility: number; - lookbackDays: number; -} - -/** - * Calculate position size based on fixed risk percentage - */ -export function fixedRiskPositionSize(params: PositionSizeParams): number { - const { accountSize, riskPercentage, entryPrice, stopLoss, leverage = 1 } = params; - - // Input validation - if (accountSize <= 0 || riskPercentage <= 0 || entryPrice <= 0 || leverage <= 0) return 0; - if (entryPrice === stopLoss) return 0; - - const riskAmount = accountSize * (riskPercentage / 100); - const riskPerShare = Math.abs(entryPrice - stopLoss); - const basePositionSize = riskAmount / riskPerShare; - - return Math.floor(basePositionSize * leverage); -} - -/** - * Calculate position size using Kelly Criterion - */ -export function kellyPositionSize(params: KellyParams, accountSize: number): number { - 
const { winRate, averageWin, averageLoss } = params; - - // Validate inputs - if (averageLoss === 0 || winRate <= 0 || winRate >= 1 || averageWin <= 0) return 0; - - const lossRate = 1 - winRate; - const winLossRatio = averageWin / Math.abs(averageLoss); - - // Correct Kelly formula: f = (bp - q) / b - // where: b = win/loss ratio, p = win rate, q = loss rate - const kellyFraction = (winRate * winLossRatio - lossRate) / winLossRatio; - - // Cap Kelly fraction to prevent over-leveraging (max 25% of Kelly recommendation) - const cappedKelly = Math.max(0, Math.min(kellyFraction * 0.25, 0.25)); - - return accountSize * cappedKelly; -} - -/** - * Calculate fractional Kelly position size (more conservative) - */ -export function fractionalKellyPositionSize( - params: KellyParams, - accountSize: number, - fraction: number = 0.25 -): number { - // Input validation - if (fraction <= 0 || fraction > 1) return 0; - - const fullKelly = kellyPositionSize(params, accountSize); - return fullKelly * fraction; -} - -/** - * Calculate position size based on volatility targeting - */ -export function volatilityTargetPositionSize(params: VolatilityParams, accountSize: number): number { - const { price, volatility, targetVolatility } = params; - - // Input validation - if (volatility <= 0 || price <= 0 || targetVolatility <= 0 || accountSize <= 0) return 0; - - const volatilityRatio = targetVolatility / volatility; - const basePositionValue = accountSize * Math.min(volatilityRatio, 2); // Cap at 2x leverage - - return Math.floor(basePositionValue / price); -} - -/** - * Calculate equal weight position size - */ -export function equalWeightPositionSize( - accountSize: number, - numberOfPositions: number, - price: number -): number { - // Input validation - if (numberOfPositions <= 0 || price <= 0 || accountSize <= 0) return 0; - - const positionValue = accountSize / numberOfPositions; - return Math.floor(positionValue / price); -} - -/** - * Calculate position size based on Average True 
Range (ATR) - */ -export function atrBasedPositionSize( - accountSize: number, - riskPercentage: number, - atrValue: number, - atrMultiplier: number = 2, - price: number -): number { - if (atrValue === 0 || price === 0) return 0; - - const riskAmount = accountSize * (riskPercentage / 100); - const stopDistance = atrValue * atrMultiplier; - const positionSize = riskAmount / stopDistance; - - // Return position size in shares, not dollars - return Math.floor(positionSize); -} - -/** - * Calculate position size using Van Tharp's expectancy - */ -export function expectancyPositionSize( - accountSize: number, - winRate: number, - averageWin: number, - averageLoss: number, - maxRiskPercentage: number = 2 -): number { - // Input validation - if (accountSize <= 0 || winRate <= 0 || winRate >= 1 || averageWin <= 0 || averageLoss === 0) return 0; - - const expectancy = (winRate * averageWin) - ((1 - winRate) * Math.abs(averageLoss)); - - if (expectancy <= 0) return 0; - - // Scale position size based on expectancy relative to average loss - // Higher expectancy relative to risk allows for larger position - const expectancyRatio = expectancy / Math.abs(averageLoss); - const riskPercentage = Math.min(expectancyRatio * 0.5, maxRiskPercentage); - - const positionValue = accountSize * (riskPercentage / 100); - return positionValue; -} - -/** - * Calculate optimal position size using Monte Carlo simulation - */ -export function monteCarloPositionSize( - accountSize: number, - historicalReturns: number[], - simulations: number = 1000, - confidenceLevel: number = 0.95 -): number { - if (historicalReturns.length === 0) return 0; - - const outcomes: number[] = []; - const mean = historicalReturns.reduce((sum, ret) => sum + ret, 0) / historicalReturns.length; - const variance = historicalReturns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / historicalReturns.length; - const stdDev = Math.sqrt(variance); - - // Test different position sizes (as fraction of account) - const 
testFractions = [0.01, 0.025, 0.05, 0.075, 0.1, 0.15, 0.2, 0.25]; - let optimalFraction = 0; - let bestSharpe = -Infinity; - - for (const fraction of testFractions) { - const simOutcomes: number[] = []; - - for (let i = 0; i < simulations; i++) { - let portfolioValue = accountSize; - - // Simulate trades over a period - for (let j = 0; j < 50; j++) { // 50 trades - const randomReturn = historicalReturns[Math.floor(Math.random() * historicalReturns.length)]; - const positionReturn = randomReturn * fraction; - portfolioValue = portfolioValue * (1 + positionReturn); - } - - simOutcomes.push(portfolioValue); - } - - // Calculate Sharpe ratio for this fraction - const avgOutcome = simOutcomes.reduce((sum, val) => sum + val, 0) / simOutcomes.length; - const returns = simOutcomes.map(val => (val - accountSize) / accountSize); - const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const returnStdDev = Math.sqrt(returns.reduce((sum, ret) => sum + Math.pow(ret - avgReturn, 2), 0) / returns.length); - - const sharpe = returnStdDev > 0 ? 
avgReturn / returnStdDev : -Infinity; - - if (sharpe > bestSharpe) { - bestSharpe = sharpe; - optimalFraction = fraction; - } - } - - return accountSize * optimalFraction; -} - -/** - * Calculate position size based on Sharpe ratio optimization - */ -export function sharpeOptimizedPositionSize( - accountSize: number, - expectedReturn: number, - volatility: number, - riskFreeRate: number = 0.02, - maxLeverage: number = 3 -): number { - // Input validation - if (volatility <= 0 || accountSize <= 0 || expectedReturn <= riskFreeRate || maxLeverage <= 0) return 0; - // Kelly criterion with Sharpe ratio optimization - const excessReturn = expectedReturn - riskFreeRate; - const kellyFraction = excessReturn / (volatility * volatility); - - // Apply maximum leverage constraint - const constrainedFraction = Math.max(0, Math.min(kellyFraction, maxLeverage)); - - return accountSize * constrainedFraction; -} - -/** - * Fixed fractional position sizing - */ -export function fixedFractionalPositionSize( - accountSize: number, - riskPercentage: number, - stopLossPercentage: number, - price: number -): number { - // Input validation - if (stopLossPercentage <= 0 || price <= 0 || riskPercentage <= 0 || accountSize <= 0) return 0; - - const riskAmount = accountSize * (riskPercentage / 100); - const stopLossAmount = price * (stopLossPercentage / 100); - - return Math.floor(riskAmount / stopLossAmount); -} - -/** - * Volatility-adjusted position sizing - */ -export function volatilityAdjustedPositionSize( - accountSize: number, - targetVolatility: number, - assetVolatility: number, - price: number -): number { - // Input validation - if (assetVolatility <= 0 || price <= 0 || targetVolatility <= 0 || accountSize <= 0) return 0; - - const volatilityRatio = targetVolatility / assetVolatility; - const cappedRatio = Math.min(volatilityRatio, 3); // Cap at 3x leverage - const positionValue = accountSize * cappedRatio; - - return Math.floor(positionValue / price); -} - -/** - * Calculate 
position size with correlation adjustment - */ -export function correlationAdjustedPositionSize( - basePositionSize: number, - existingPositions: Array<{ size: number; correlation: number }>, - maxCorrelationRisk: number = 0.3 -): number { - if (existingPositions.length === 0 || basePositionSize <= 0) return basePositionSize; - - // Calculate portfolio correlation risk - // This should consider the correlation between the new position and existing ones - const totalCorrelationRisk = existingPositions.reduce((total, position) => { - // Weight correlation by position size relative to new position - const relativeSize = position.size / (basePositionSize + position.size); - return total + (relativeSize * Math.abs(position.correlation)); - }, 0); - - // Adjust position size based on correlation risk - const correlationAdjustment = Math.max(0.1, 1 - (totalCorrelationRisk / maxCorrelationRisk)); - - return Math.floor(basePositionSize * correlationAdjustment); -} - -/** - * Calculate portfolio heat (total risk across all positions) - */ -export function calculatePortfolioHeat( - positions: Array<{ value: number; risk: number }>, - accountSize: number -): number { - // Input validation - if (accountSize <= 0 || positions.length === 0) return 0; - - const totalRisk = positions.reduce((sum, position) => { - // Ensure risk values are positive - return sum + Math.max(0, position.risk); - }, 0); - - return Math.min((totalRisk / accountSize) * 100, 100); // Cap at 100% -} - -/** - * Dynamic position sizing based on market conditions - */ -export function dynamicPositionSize( - basePositionSize: number, - marketVolatility: number, - normalVolatility: number, - drawdownLevel: number, - maxDrawdownThreshold: number = 0.1 -): number { - // Input validation - if (basePositionSize <= 0 || marketVolatility <= 0 || normalVolatility <= 0) return 0; - if (drawdownLevel < 0 || maxDrawdownThreshold <= 0) return basePositionSize; - - // Volatility adjustment - reduce size when volatility is 
high - const volatilityAdjustment = Math.min(normalVolatility / marketVolatility, 2); // Cap at 2x - - // Drawdown adjustment - reduce size as drawdown increases - const normalizedDrawdown = Math.min(drawdownLevel / maxDrawdownThreshold, 1); - const drawdownAdjustment = Math.max(0.1, 1 - normalizedDrawdown); - - const adjustedSize = basePositionSize * volatilityAdjustment * drawdownAdjustment; - return Math.floor(Math.max(0, adjustedSize)); -} - -/** - * Calculate maximum position size based on liquidity - */ -export function liquidityConstrainedPositionSize( - desiredPositionSize: number, - averageDailyVolume: number, - maxVolumePercentage: number = 0.05, - price: number -): number { - if (averageDailyVolume === 0 || price === 0) return 0; - - const maxShares = averageDailyVolume * maxVolumePercentage; - - return Math.min(desiredPositionSize, maxShares); -} - -/** - * Multi-timeframe position sizing - */ -export function multiTimeframePositionSize( - accountSize: number, - shortTermSignal: number, // -1 to 1 - mediumTermSignal: number, // -1 to 1 - longTermSignal: number, // -1 to 1 - baseRiskPercentage: number = 1 -): number { - // Input validation - if (accountSize <= 0 || baseRiskPercentage <= 0) return 0; - - // Clamp signals to valid range - const clampedShort = Math.max(-1, Math.min(1, shortTermSignal)); - const clampedMedium = Math.max(-1, Math.min(1, mediumTermSignal)); - const clampedLong = Math.max(-1, Math.min(1, longTermSignal)); - - // Weight the signals (long-term gets higher weight) - const weightedSignal = ( - clampedShort * 0.2 + - clampedMedium * 0.3 + - clampedLong * 0.5 - ); - - // Adjust risk based on signal strength - const adjustedRisk = baseRiskPercentage * Math.abs(weightedSignal); - - return accountSize * (adjustedRisk / 100); -} - -/** - * Risk parity position sizing - */ -export function riskParityPositionSize( - assets: Array<{ volatility: number; price: number }>, - targetRisk: number, - accountSize: number -): number[] { - if 
(assets.length === 0) return []; - - // Calculate inverse volatility weights - const totalInverseVol = assets.reduce((sum, asset) => { - if (asset.volatility === 0) return sum; - return sum + (1 / asset.volatility); - }, 0); - - if (totalInverseVol === 0) return assets.map(() => 0); - - return assets.map(asset => { - if (asset.volatility === 0 || asset.price === 0) return 0; - // Calculate weight based on inverse volatility - const weight = (1 / asset.volatility) / totalInverseVol; - - // The weight itself already accounts for risk parity - // We just need to scale by target risk once - const positionValue = accountSize * weight * targetRisk; - return Math.floor(positionValue / asset.price); - }); -} - -/** - * Validate position size against risk limits - */ -export function validatePositionSize( - positionSize: number, - price: number, - accountSize: number, - maxPositionPercentage: number = 10, - maxLeverage: number = 1 -): { isValid: boolean; adjustedSize: number; violations: string[] } { - const violations: string[] = []; - let adjustedSize = positionSize; - - // Check maximum position percentage - const positionValue = positionSize * price; - const positionPercentage = (positionValue / accountSize) * 100; - - if (positionPercentage > maxPositionPercentage) { - violations.push(`Position exceeds maximum ${maxPositionPercentage}% of account`); - adjustedSize = (accountSize * maxPositionPercentage / 100) / price; - } - - // Check leverage limits - const leverage = positionValue / accountSize; - if (leverage > maxLeverage) { - violations.push(`Position exceeds maximum leverage of ${maxLeverage}x`); - adjustedSize = Math.min(adjustedSize, (accountSize * maxLeverage) / price); - } - - // Check minimum position size - if (adjustedSize < 1 && adjustedSize > 0) { - violations.push('Position size too small (less than 1 share)'); - adjustedSize = 0; - } - - return { - isValid: violations.length === 0, - adjustedSize: Math.max(0, adjustedSize), - violations - }; -} - -/** 
- * Optimal F position sizing (Ralph Vince's method) - */ -export function optimalFPositionSize( - accountSize: number, - historicalReturns: number[], - maxIterations: number = 100 -): number { - if (historicalReturns.length === 0 || accountSize <= 0) return 0; - - // Convert returns to P&L per unit - const pnlValues = historicalReturns.map(ret => ret * 1000); // Assuming $1000 per unit - - let bestF = 0; - let bestTWR = 0; // Terminal Wealth Relative - - // Test different f values (0.01 to 1.00) - for (let f = 0.01; f <= 1.0; f += 0.01) { - let twr = 1.0; - let valid = true; - - for (const pnl of pnlValues) { - const hpr = 1 + (f * pnl / 1000); // Holding Period Return - - if (hpr <= 0) { - valid = false; - break; - } - - twr *= hpr; - } - - if (valid && twr > bestTWR) { - bestTWR = twr; - bestF = f; - } - } - - // Apply safety factor - const safeF = bestF * 0.75; // 75% of optimal f for safety - - return accountSize * safeF; -} - -/** - * Secure F position sizing (safer version of Optimal F) - */ -export function secureFPositionSize( - accountSize: number, - historicalReturns: number[], - confidenceLevel: number = 0.95 -): number { - if (historicalReturns.length === 0 || accountSize <= 0) return 0; - - // Sort returns to find worst-case scenarios - const sortedReturns = [...historicalReturns].sort((a, b) => a - b); - const worstCaseIndex = Math.floor((1 - confidenceLevel) * sortedReturns.length); - const worstCaseReturn = sortedReturns[worstCaseIndex]; - - // Calculate maximum position size that won't bankrupt at confidence level - const maxLoss = Math.abs(worstCaseReturn); - const maxRiskPercentage = 0.02; // Never risk more than 2% on worst case - - if (maxLoss === 0) return accountSize * 0.1; // Default to 10% if no historical losses - - const secureF = Math.min(maxRiskPercentage / maxLoss, 0.25); // Cap at 25% - - return accountSize * secureF; -} +/** + * Position Sizing Calculations + * Risk-based position sizing methods for trading strategies + */ + +export 
interface PositionSizeParams { + accountSize: number; + riskPercentage: number; + entryPrice: number; + stopLoss: number; + leverage?: number; +} + +export interface KellyParams { + winRate: number; + averageWin: number; + averageLoss: number; +} + +export interface VolatilityParams { + price: number; + volatility: number; + targetVolatility: number; + lookbackDays: number; +} + +/** + * Calculate position size based on fixed risk percentage + */ +export function fixedRiskPositionSize(params: PositionSizeParams): number { + const { accountSize, riskPercentage, entryPrice, stopLoss, leverage = 1 } = params; + + // Input validation + if (accountSize <= 0 || riskPercentage <= 0 || entryPrice <= 0 || leverage <= 0) return 0; + if (entryPrice === stopLoss) return 0; + + const riskAmount = accountSize * (riskPercentage / 100); + const riskPerShare = Math.abs(entryPrice - stopLoss); + const basePositionSize = riskAmount / riskPerShare; + + return Math.floor(basePositionSize * leverage); +} + +/** + * Calculate position size using Kelly Criterion + */ +export function kellyPositionSize(params: KellyParams, accountSize: number): number { + const { winRate, averageWin, averageLoss } = params; + + // Validate inputs + if (averageLoss === 0 || winRate <= 0 || winRate >= 1 || averageWin <= 0) return 0; + + const lossRate = 1 - winRate; + const winLossRatio = averageWin / Math.abs(averageLoss); + + // Correct Kelly formula: f = (bp - q) / b + // where: b = win/loss ratio, p = win rate, q = loss rate + const kellyFraction = (winRate * winLossRatio - lossRate) / winLossRatio; + + // Cap Kelly fraction to prevent over-leveraging (max 25% of Kelly recommendation) + const cappedKelly = Math.max(0, Math.min(kellyFraction * 0.25, 0.25)); + + return accountSize * cappedKelly; +} + +/** + * Calculate fractional Kelly position size (more conservative) + */ +export function fractionalKellyPositionSize( + params: KellyParams, + accountSize: number, + fraction: number = 0.25 +): number { + 
// Input validation + if (fraction <= 0 || fraction > 1) return 0; + + const fullKelly = kellyPositionSize(params, accountSize); + return fullKelly * fraction; +} + +/** + * Calculate position size based on volatility targeting + */ +export function volatilityTargetPositionSize(params: VolatilityParams, accountSize: number): number { + const { price, volatility, targetVolatility } = params; + + // Input validation + if (volatility <= 0 || price <= 0 || targetVolatility <= 0 || accountSize <= 0) return 0; + + const volatilityRatio = targetVolatility / volatility; + const basePositionValue = accountSize * Math.min(volatilityRatio, 2); // Cap at 2x leverage + + return Math.floor(basePositionValue / price); +} + +/** + * Calculate equal weight position size + */ +export function equalWeightPositionSize( + accountSize: number, + numberOfPositions: number, + price: number +): number { + // Input validation + if (numberOfPositions <= 0 || price <= 0 || accountSize <= 0) return 0; + + const positionValue = accountSize / numberOfPositions; + return Math.floor(positionValue / price); +} + +/** + * Calculate position size based on Average True Range (ATR) + */ +export function atrBasedPositionSize( + accountSize: number, + riskPercentage: number, + atrValue: number, + atrMultiplier: number = 2, + price: number +): number { + if (atrValue === 0 || price === 0) return 0; + + const riskAmount = accountSize * (riskPercentage / 100); + const stopDistance = atrValue * atrMultiplier; + const positionSize = riskAmount / stopDistance; + + // Return position size in shares, not dollars + return Math.floor(positionSize); +} + +/** + * Calculate position size using Van Tharp's expectancy + */ +export function expectancyPositionSize( + accountSize: number, + winRate: number, + averageWin: number, + averageLoss: number, + maxRiskPercentage: number = 2 +): number { + // Input validation + if (accountSize <= 0 || winRate <= 0 || winRate >= 1 || averageWin <= 0 || averageLoss === 0) return 0; 
+ + const expectancy = (winRate * averageWin) - ((1 - winRate) * Math.abs(averageLoss)); + + if (expectancy <= 0) return 0; + + // Scale position size based on expectancy relative to average loss + // Higher expectancy relative to risk allows for larger position + const expectancyRatio = expectancy / Math.abs(averageLoss); + const riskPercentage = Math.min(expectancyRatio * 0.5, maxRiskPercentage); + + const positionValue = accountSize * (riskPercentage / 100); + return positionValue; +} + +/** + * Calculate optimal position size using Monte Carlo simulation + */ +export function monteCarloPositionSize( + accountSize: number, + historicalReturns: number[], + simulations: number = 1000, + confidenceLevel: number = 0.95 +): number { + if (historicalReturns.length === 0) return 0; + + const outcomes: number[] = []; + const mean = historicalReturns.reduce((sum, ret) => sum + ret, 0) / historicalReturns.length; + const variance = historicalReturns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / historicalReturns.length; + const stdDev = Math.sqrt(variance); + + // Test different position sizes (as fraction of account) + const testFractions = [0.01, 0.025, 0.05, 0.075, 0.1, 0.15, 0.2, 0.25]; + let optimalFraction = 0; + let bestSharpe = -Infinity; + + for (const fraction of testFractions) { + const simOutcomes: number[] = []; + + for (let i = 0; i < simulations; i++) { + let portfolioValue = accountSize; + + // Simulate trades over a period + for (let j = 0; j < 50; j++) { // 50 trades + const randomReturn = historicalReturns[Math.floor(Math.random() * historicalReturns.length)]; + const positionReturn = randomReturn * fraction; + portfolioValue = portfolioValue * (1 + positionReturn); + } + + simOutcomes.push(portfolioValue); + } + + // Calculate Sharpe ratio for this fraction + const avgOutcome = simOutcomes.reduce((sum, val) => sum + val, 0) / simOutcomes.length; + const returns = simOutcomes.map(val => (val - accountSize) / accountSize); + const avgReturn = 
returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const returnStdDev = Math.sqrt(returns.reduce((sum, ret) => sum + Math.pow(ret - avgReturn, 2), 0) / returns.length); + + const sharpe = returnStdDev > 0 ? avgReturn / returnStdDev : -Infinity; + + if (sharpe > bestSharpe) { + bestSharpe = sharpe; + optimalFraction = fraction; + } + } + + return accountSize * optimalFraction; +} + +/** + * Calculate position size based on Sharpe ratio optimization + */ +export function sharpeOptimizedPositionSize( + accountSize: number, + expectedReturn: number, + volatility: number, + riskFreeRate: number = 0.02, + maxLeverage: number = 3 +): number { + // Input validation + if (volatility <= 0 || accountSize <= 0 || expectedReturn <= riskFreeRate || maxLeverage <= 0) return 0; + // Kelly criterion with Sharpe ratio optimization + const excessReturn = expectedReturn - riskFreeRate; + const kellyFraction = excessReturn / (volatility * volatility); + + // Apply maximum leverage constraint + const constrainedFraction = Math.max(0, Math.min(kellyFraction, maxLeverage)); + + return accountSize * constrainedFraction; +} + +/** + * Fixed fractional position sizing + */ +export function fixedFractionalPositionSize( + accountSize: number, + riskPercentage: number, + stopLossPercentage: number, + price: number +): number { + // Input validation + if (stopLossPercentage <= 0 || price <= 0 || riskPercentage <= 0 || accountSize <= 0) return 0; + + const riskAmount = accountSize * (riskPercentage / 100); + const stopLossAmount = price * (stopLossPercentage / 100); + + return Math.floor(riskAmount / stopLossAmount); +} + +/** + * Volatility-adjusted position sizing + */ +export function volatilityAdjustedPositionSize( + accountSize: number, + targetVolatility: number, + assetVolatility: number, + price: number +): number { + // Input validation + if (assetVolatility <= 0 || price <= 0 || targetVolatility <= 0 || accountSize <= 0) return 0; + + const volatilityRatio = targetVolatility 
/ assetVolatility; + const cappedRatio = Math.min(volatilityRatio, 3); // Cap at 3x leverage + const positionValue = accountSize * cappedRatio; + + return Math.floor(positionValue / price); +} + +/** + * Calculate position size with correlation adjustment + */ +export function correlationAdjustedPositionSize( + basePositionSize: number, + existingPositions: Array<{ size: number; correlation: number }>, + maxCorrelationRisk: number = 0.3 +): number { + if (existingPositions.length === 0 || basePositionSize <= 0) return basePositionSize; + + // Calculate portfolio correlation risk + // This should consider the correlation between the new position and existing ones + const totalCorrelationRisk = existingPositions.reduce((total, position) => { + // Weight correlation by position size relative to new position + const relativeSize = position.size / (basePositionSize + position.size); + return total + (relativeSize * Math.abs(position.correlation)); + }, 0); + + // Adjust position size based on correlation risk + const correlationAdjustment = Math.max(0.1, 1 - (totalCorrelationRisk / maxCorrelationRisk)); + + return Math.floor(basePositionSize * correlationAdjustment); +} + +/** + * Calculate portfolio heat (total risk across all positions) + */ +export function calculatePortfolioHeat( + positions: Array<{ value: number; risk: number }>, + accountSize: number +): number { + // Input validation + if (accountSize <= 0 || positions.length === 0) return 0; + + const totalRisk = positions.reduce((sum, position) => { + // Ensure risk values are positive + return sum + Math.max(0, position.risk); + }, 0); + + return Math.min((totalRisk / accountSize) * 100, 100); // Cap at 100% +} + +/** + * Dynamic position sizing based on market conditions + */ +export function dynamicPositionSize( + basePositionSize: number, + marketVolatility: number, + normalVolatility: number, + drawdownLevel: number, + maxDrawdownThreshold: number = 0.1 +): number { + // Input validation + if 
(basePositionSize <= 0 || marketVolatility <= 0 || normalVolatility <= 0) return 0; + if (drawdownLevel < 0 || maxDrawdownThreshold <= 0) return basePositionSize; + + // Volatility adjustment - reduce size when volatility is high + const volatilityAdjustment = Math.min(normalVolatility / marketVolatility, 2); // Cap at 2x + + // Drawdown adjustment - reduce size as drawdown increases + const normalizedDrawdown = Math.min(drawdownLevel / maxDrawdownThreshold, 1); + const drawdownAdjustment = Math.max(0.1, 1 - normalizedDrawdown); + + const adjustedSize = basePositionSize * volatilityAdjustment * drawdownAdjustment; + return Math.floor(Math.max(0, adjustedSize)); +} + +/** + * Calculate maximum position size based on liquidity + */ +export function liquidityConstrainedPositionSize( + desiredPositionSize: number, + averageDailyVolume: number, + maxVolumePercentage: number = 0.05, + price: number +): number { + if (averageDailyVolume === 0 || price === 0) return 0; + + const maxShares = averageDailyVolume * maxVolumePercentage; + + return Math.min(desiredPositionSize, maxShares); +} + +/** + * Multi-timeframe position sizing + */ +export function multiTimeframePositionSize( + accountSize: number, + shortTermSignal: number, // -1 to 1 + mediumTermSignal: number, // -1 to 1 + longTermSignal: number, // -1 to 1 + baseRiskPercentage: number = 1 +): number { + // Input validation + if (accountSize <= 0 || baseRiskPercentage <= 0) return 0; + + // Clamp signals to valid range + const clampedShort = Math.max(-1, Math.min(1, shortTermSignal)); + const clampedMedium = Math.max(-1, Math.min(1, mediumTermSignal)); + const clampedLong = Math.max(-1, Math.min(1, longTermSignal)); + + // Weight the signals (long-term gets higher weight) + const weightedSignal = ( + clampedShort * 0.2 + + clampedMedium * 0.3 + + clampedLong * 0.5 + ); + + // Adjust risk based on signal strength + const adjustedRisk = baseRiskPercentage * Math.abs(weightedSignal); + + return accountSize * 
(adjustedRisk / 100); +} + +/** + * Risk parity position sizing + */ +export function riskParityPositionSize( + assets: Array<{ volatility: number; price: number }>, + targetRisk: number, + accountSize: number +): number[] { + if (assets.length === 0) return []; + + // Calculate inverse volatility weights + const totalInverseVol = assets.reduce((sum, asset) => { + if (asset.volatility === 0) return sum; + return sum + (1 / asset.volatility); + }, 0); + + if (totalInverseVol === 0) return assets.map(() => 0); + + return assets.map(asset => { + if (asset.volatility === 0 || asset.price === 0) return 0; + // Calculate weight based on inverse volatility + const weight = (1 / asset.volatility) / totalInverseVol; + + // The weight itself already accounts for risk parity + // We just need to scale by target risk once + const positionValue = accountSize * weight * targetRisk; + return Math.floor(positionValue / asset.price); + }); +} + +/** + * Validate position size against risk limits + */ +export function validatePositionSize( + positionSize: number, + price: number, + accountSize: number, + maxPositionPercentage: number = 10, + maxLeverage: number = 1 +): { isValid: boolean; adjustedSize: number; violations: string[] } { + const violations: string[] = []; + let adjustedSize = positionSize; + + // Check maximum position percentage + const positionValue = positionSize * price; + const positionPercentage = (positionValue / accountSize) * 100; + + if (positionPercentage > maxPositionPercentage) { + violations.push(`Position exceeds maximum ${maxPositionPercentage}% of account`); + adjustedSize = (accountSize * maxPositionPercentage / 100) / price; + } + + // Check leverage limits + const leverage = positionValue / accountSize; + if (leverage > maxLeverage) { + violations.push(`Position exceeds maximum leverage of ${maxLeverage}x`); + adjustedSize = Math.min(adjustedSize, (accountSize * maxLeverage) / price); + } + + // Check minimum position size + if (adjustedSize < 1 && 
adjustedSize > 0) { + violations.push('Position size too small (less than 1 share)'); + adjustedSize = 0; + } + + return { + isValid: violations.length === 0, + adjustedSize: Math.max(0, adjustedSize), + violations + }; +} + +/** + * Optimal F position sizing (Ralph Vince's method) + */ +export function optimalFPositionSize( + accountSize: number, + historicalReturns: number[], + maxIterations: number = 100 +): number { + if (historicalReturns.length === 0 || accountSize <= 0) return 0; + + // Convert returns to P&L per unit + const pnlValues = historicalReturns.map(ret => ret * 1000); // Assuming $1000 per unit + + let bestF = 0; + let bestTWR = 0; // Terminal Wealth Relative + + // Test different f values (0.01 to 1.00) + for (let f = 0.01; f <= 1.0; f += 0.01) { + let twr = 1.0; + let valid = true; + + for (const pnl of pnlValues) { + const hpr = 1 + (f * pnl / 1000); // Holding Period Return + + if (hpr <= 0) { + valid = false; + break; + } + + twr *= hpr; + } + + if (valid && twr > bestTWR) { + bestTWR = twr; + bestF = f; + } + } + + // Apply safety factor + const safeF = bestF * 0.75; // 75% of optimal f for safety + + return accountSize * safeF; +} + +/** + * Secure F position sizing (safer version of Optimal F) + */ +export function secureFPositionSize( + accountSize: number, + historicalReturns: number[], + confidenceLevel: number = 0.95 +): number { + if (historicalReturns.length === 0 || accountSize <= 0) return 0; + + // Sort returns to find worst-case scenarios + const sortedReturns = [...historicalReturns].sort((a, b) => a - b); + const worstCaseIndex = Math.floor((1 - confidenceLevel) * sortedReturns.length); + const worstCaseReturn = sortedReturns[worstCaseIndex]; + + // Calculate maximum position size that won't bankrupt at confidence level + const maxLoss = Math.abs(worstCaseReturn); + const maxRiskPercentage = 0.02; // Never risk more than 2% on worst case + + if (maxLoss === 0) return accountSize * 0.1; // Default to 10% if no historical losses 
+ + const secureF = Math.min(maxRiskPercentage / maxLoss, 0.25); // Cap at 25% + + return accountSize * secureF; +} diff --git a/libs/utils/src/calculations/risk-metrics.ts b/libs/utils/src/calculations/risk-metrics.ts index 5b52100..3098798 100644 --- a/libs/utils/src/calculations/risk-metrics.ts +++ b/libs/utils/src/calculations/risk-metrics.ts @@ -1,375 +1,375 @@ -/** - * Risk Metrics and Analysis - * Comprehensive risk measurement tools for portfolio and trading analysis - */ - -import { RiskMetrics, treynorRatio } from './index'; - -/** - * Calculate Value at Risk (VaR) using historical simulation - */ -export function valueAtRisk(returns: number[], confidenceLevel: number = 0.95): number { - if (returns.length === 0) return 0; - - const sortedReturns = [...returns].sort((a, b) => a - b); - const index = Math.floor((1 - confidenceLevel) * sortedReturns.length); - - return sortedReturns[index] || 0; -} - -/** - * Calculate Conditional Value at Risk (CVaR/Expected Shortfall) - */ -export function conditionalValueAtRisk(returns: number[], confidenceLevel: number = 0.95): number { - if (returns.length === 0) return 0; - - const sortedReturns = [...returns].sort((a, b) => a - b); - const cutoffIndex = Math.floor((1 - confidenceLevel) * sortedReturns.length); - - if (cutoffIndex === 0) return sortedReturns[0]; - - const tailReturns = sortedReturns.slice(0, cutoffIndex); - return tailReturns.reduce((sum, ret) => sum + ret, 0) / tailReturns.length; -} - -/** - * Calculate parametric VaR using normal distribution - */ -export function parametricVaR( - returns: number[], - confidenceLevel: number = 0.95, - portfolioValue: number = 1 -): number { - if (returns.length === 0) return 0; - - const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1); - const stdDev = Math.sqrt(variance); - - // Z-score for confidence level (normal distribution) - const 
zScore = getZScore(confidenceLevel); - - return portfolioValue * (mean - zScore * stdDev); -} - -/** - * Calculate maximum drawdown - */ -export function maxDrawdown(equityCurve: number[]): number { - if (equityCurve.length < 2) return 0; - - let maxDD = 0; - let peak = equityCurve[0]; - - for (let i = 1; i < equityCurve.length; i++) { - if (equityCurve[i] > peak) { - peak = equityCurve[i]; - } else { - const drawdown = (peak - equityCurve[i]) / peak; - maxDD = Math.max(maxDD, drawdown); - } - } - - return maxDD; -} - -/** - * Calculate downside deviation - */ -export function downsideDeviation(returns: number[], targetReturn: number = 0): number { - if (returns.length === 0) return 0; - - const downsideReturns = returns.filter(ret => ret < targetReturn); - - if (downsideReturns.length === 0) return 0; - - const sumSquaredDownside = downsideReturns.reduce( - (sum, ret) => sum + Math.pow(ret - targetReturn, 2), - 0 - ); - - return Math.sqrt(sumSquaredDownside / returns.length); -} - -/** - * Calculate Sharpe ratio - */ -export function sharpeRatio(returns: number[], riskFreeRate: number = 0): number { - if (returns.length < 2) return 0; - - const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1); - const stdDev = Math.sqrt(variance); - - if (stdDev === 0) return 0; - - return (mean - riskFreeRate) / stdDev; -} - - -/** - * Calculate beta coefficient - */ -export function beta(portfolioReturns: number[], marketReturns: number[]): number { - if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < 2) { - return 0; - } - - const n = portfolioReturns.length; - const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / n; - const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / n; - - let covariance = 0; - let marketVariance = 0; - - for (let i = 0; i < n; i++) { - const portfolioDiff = 
portfolioReturns[i] - portfolioMean; - const marketDiff = marketReturns[i] - marketMean; - - covariance += portfolioDiff * marketDiff; - marketVariance += marketDiff * marketDiff; - } - - return marketVariance === 0 ? 0 : covariance / marketVariance; -} - -/** - * Calculate alpha - */ -export function alpha( - portfolioReturns: number[], - marketReturns: number[], - riskFreeRate: number = 0 -): number { - const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length; - const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length; - const portfolioBeta = beta(portfolioReturns, marketReturns); - - return portfolioMean - (riskFreeRate + portfolioBeta * (marketMean - riskFreeRate)); -} - -/** - * Calculate tracking error - */ -export function trackingError(portfolioReturns: number[], benchmarkReturns: number[]): number { - if (portfolioReturns.length !== benchmarkReturns.length || portfolioReturns.length === 0) { - return 0; - } - - const activeReturns = portfolioReturns.map((ret, i) => ret - benchmarkReturns[i]); - const mean = activeReturns.reduce((sum, ret) => sum + ret, 0) / activeReturns.length; - - const variance = activeReturns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (activeReturns.length - 1); - - return Math.sqrt(variance); -} - -/** - * Calculate volatility (standard deviation of returns) - */ -export function volatility(returns: number[]): number { - if (returns.length < 2) return 0; - - const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1); - - return Math.sqrt(variance); -} - -/** - * Calculate annualized volatility - */ -export function annualizedVolatility(returns: number[], periodsPerYear: number = 252): number { - return volatility(returns) * Math.sqrt(periodsPerYear); -} - -/** - * Calculate skewness (measure of asymmetry) - */ -export function 
skewness(returns: number[]): number { - if (returns.length < 3) return 0; - - const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length; - const stdDev = Math.sqrt(variance); - - if (stdDev === 0) return 0; - - const skew = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 3), 0) / returns.length; - - return skew; -} - -/** - * Calculate kurtosis (measure of tail heaviness) - */ -export function kurtosis(returns: number[]): number { - if (returns.length < 4) return 0; - - const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length; - const stdDev = Math.sqrt(variance); - - if (stdDev === 0) return 0; - - const kurt = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 4), 0) / returns.length; - - return kurt - 3; // Excess kurtosis (subtract 3 for normal distribution baseline) -} - -/** - * Calculate comprehensive risk metrics - */ -export function calculateRiskMetrics( - returns: number[], - equityCurve: number[], - marketReturns?: number[], - riskFreeRate: number = 0 -): RiskMetrics { - if (returns.length === 0) { - return { - var95: 0, - var99: 0, - cvar95: 0, - maxDrawdown: 0, - volatility: 0, - downside_deviation: 0, - calmar_ratio: 0, - sortino_ratio: 0, - beta: 0, - alpha: 0, - sharpeRatio: 0, - treynorRatio: 0, - trackingError: 0, - informationRatio: 0 - }; - } - - const portfolioVolatility = volatility(returns); - const portfolioMean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; - // Calculate VaR - const var95Value = valueAtRisk(returns, 0.95); - const var99Value = valueAtRisk(returns, 0.99); - const cvar95Value = conditionalValueAtRisk(returns, 0.95); - - // Calculate max drawdown - const maxDD = maxDrawdown(equityCurve); - - // Calculate downside deviation - const 
downsideDeviationValue = downsideDeviation(returns); - // Calculate ratios - const calmarRatio = maxDD > 0 ? portfolioMean / maxDD : 0; - const sortinoRatio = downsideDeviationValue > 0 ? (portfolioMean - riskFreeRate) / downsideDeviationValue : 0; - const sharpeRatio = portfolioVolatility > 0 ? (portfolioMean - riskFreeRate) / portfolioVolatility : 0; - - let portfolioBeta = 0; - let portfolioAlpha = 0; - let portfolioTreynorRatio = 0; - let portfolioTrackingError = 0; - let informationRatio = 0; - - if (marketReturns && marketReturns.length === returns.length) { - portfolioBeta = beta(returns, marketReturns); - portfolioAlpha = alpha(returns, marketReturns, riskFreeRate); - portfolioTreynorRatio = treynorRatio(returns, marketReturns, riskFreeRate); - portfolioTrackingError = trackingError(returns, marketReturns); - informationRatio = portfolioTrackingError > 0 ? portfolioAlpha / portfolioTrackingError : 0; - } - return { - var95: var95Value, - var99: var99Value, - cvar95: cvar95Value, - maxDrawdown: maxDD, - volatility: portfolioVolatility, - downside_deviation: downsideDeviationValue, - calmar_ratio: calmarRatio, - sortino_ratio: sortinoRatio, - beta: portfolioBeta, - alpha: portfolioAlpha, - sharpeRatio, - treynorRatio: portfolioTreynorRatio, - trackingError: portfolioTrackingError, - informationRatio - }; -} - -/** - * Helper function to get Z-score for confidence level - * This implementation handles arbitrary confidence levels - */ -function getZScore(confidenceLevel: number): number { - // First check our lookup table for common values (more precise) - const zScores: { [key: string]: number } = { - '0.90': 1.282, - '0.95': 1.645, - '0.975': 1.960, - '0.99': 2.326, - '0.995': 2.576 - }; - - const key = confidenceLevel.toString(); - if (zScores[key]) return zScores[key]; - - // For arbitrary confidence levels, use approximation - if (confidenceLevel < 0.5) return -getZScore(1 - confidenceLevel); - - if (confidenceLevel >= 0.999) return 3.09; // Cap at 99.9% 
for numerical stability - - // Approximation of inverse normal CDF - const y = Math.sqrt(-2.0 * Math.log(1.0 - confidenceLevel)); - return y - (2.515517 + 0.802853 * y + 0.010328 * y * y) / - (1.0 + 1.432788 * y + 0.189269 * y * y + 0.001308 * y * y * y); -} - -/** - * Calculate portfolio risk contribution - */ -export function riskContribution( - weights: number[], - covarianceMatrix: number[][], - portfolioVolatility: number -): number[] { - const n = weights.length; - const contributions: number[] = []; - - for (let i = 0; i < n; i++) { - let marginalContribution = 0; - - for (let j = 0; j < n; j++) { - marginalContribution += weights[j] * covarianceMatrix[i][j]; - } - - const contribution = (weights[i] * marginalContribution) / Math.pow(portfolioVolatility, 2); - contributions.push(contribution); - } - - return contributions; -} - -/** - * Calculate Ulcer Index - */ -export function ulcerIndex(equityCurve: Array<{ value: number; date: Date }>): number { - let sumSquaredDrawdown = 0; - let peak = equityCurve[0].value; - - for (const point of equityCurve) { - peak = Math.max(peak, point.value); - const drawdownPercent = (peak - point.value) / peak * 100; - sumSquaredDrawdown += drawdownPercent * drawdownPercent; - } - - return Math.sqrt(sumSquaredDrawdown / equityCurve.length); -} - -/** - * Calculate risk-adjusted return (RAR) - */ -export function riskAdjustedReturn( - portfolioReturn: number, - portfolioRisk: number, - riskFreeRate: number = 0 -): number { - if (portfolioRisk === 0) return 0; - return (portfolioReturn - riskFreeRate) / portfolioRisk; -} +/** + * Risk Metrics and Analysis + * Comprehensive risk measurement tools for portfolio and trading analysis + */ + +import { RiskMetrics, treynorRatio } from './index'; + +/** + * Calculate Value at Risk (VaR) using historical simulation + */ +export function valueAtRisk(returns: number[], confidenceLevel: number = 0.95): number { + if (returns.length === 0) return 0; + + const sortedReturns = 
[...returns].sort((a, b) => a - b); + const index = Math.floor((1 - confidenceLevel) * sortedReturns.length); + + return sortedReturns[index] || 0; +} + +/** + * Calculate Conditional Value at Risk (CVaR/Expected Shortfall) + */ +export function conditionalValueAtRisk(returns: number[], confidenceLevel: number = 0.95): number { + if (returns.length === 0) return 0; + + const sortedReturns = [...returns].sort((a, b) => a - b); + const cutoffIndex = Math.floor((1 - confidenceLevel) * sortedReturns.length); + + if (cutoffIndex === 0) return sortedReturns[0]; + + const tailReturns = sortedReturns.slice(0, cutoffIndex); + return tailReturns.reduce((sum, ret) => sum + ret, 0) / tailReturns.length; +} + +/** + * Calculate parametric VaR using normal distribution + */ +export function parametricVaR( + returns: number[], + confidenceLevel: number = 0.95, + portfolioValue: number = 1 +): number { + if (returns.length === 0) return 0; + + const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1); + const stdDev = Math.sqrt(variance); + + // Z-score for confidence level (normal distribution) + const zScore = getZScore(confidenceLevel); + + return portfolioValue * (mean - zScore * stdDev); +} + +/** + * Calculate maximum drawdown + */ +export function maxDrawdown(equityCurve: number[]): number { + if (equityCurve.length < 2) return 0; + + let maxDD = 0; + let peak = equityCurve[0]; + + for (let i = 1; i < equityCurve.length; i++) { + if (equityCurve[i] > peak) { + peak = equityCurve[i]; + } else { + const drawdown = (peak - equityCurve[i]) / peak; + maxDD = Math.max(maxDD, drawdown); + } + } + + return maxDD; +} + +/** + * Calculate downside deviation + */ +export function downsideDeviation(returns: number[], targetReturn: number = 0): number { + if (returns.length === 0) return 0; + + const downsideReturns = returns.filter(ret => ret < targetReturn); + + if 
(downsideReturns.length === 0) return 0; + + const sumSquaredDownside = downsideReturns.reduce( + (sum, ret) => sum + Math.pow(ret - targetReturn, 2), + 0 + ); + + return Math.sqrt(sumSquaredDownside / returns.length); +} + +/** + * Calculate Sharpe ratio + */ +export function sharpeRatio(returns: number[], riskFreeRate: number = 0): number { + if (returns.length < 2) return 0; + + const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1); + const stdDev = Math.sqrt(variance); + + if (stdDev === 0) return 0; + + return (mean - riskFreeRate) / stdDev; +} + + +/** + * Calculate beta coefficient + */ +export function beta(portfolioReturns: number[], marketReturns: number[]): number { + if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < 2) { + return 0; + } + + const n = portfolioReturns.length; + const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / n; + const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / n; + + let covariance = 0; + let marketVariance = 0; + + for (let i = 0; i < n; i++) { + const portfolioDiff = portfolioReturns[i] - portfolioMean; + const marketDiff = marketReturns[i] - marketMean; + + covariance += portfolioDiff * marketDiff; + marketVariance += marketDiff * marketDiff; + } + + return marketVariance === 0 ? 
0 : covariance / marketVariance; +} + +/** + * Calculate alpha + */ +export function alpha( + portfolioReturns: number[], + marketReturns: number[], + riskFreeRate: number = 0 +): number { + const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length; + const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length; + const portfolioBeta = beta(portfolioReturns, marketReturns); + + return portfolioMean - (riskFreeRate + portfolioBeta * (marketMean - riskFreeRate)); +} + +/** + * Calculate tracking error + */ +export function trackingError(portfolioReturns: number[], benchmarkReturns: number[]): number { + if (portfolioReturns.length !== benchmarkReturns.length || portfolioReturns.length === 0) { + return 0; + } + + const activeReturns = portfolioReturns.map((ret, i) => ret - benchmarkReturns[i]); + const mean = activeReturns.reduce((sum, ret) => sum + ret, 0) / activeReturns.length; + + const variance = activeReturns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (activeReturns.length - 1); + + return Math.sqrt(variance); +} + +/** + * Calculate volatility (standard deviation of returns) + */ +export function volatility(returns: number[]): number { + if (returns.length < 2) return 0; + + const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1); + + return Math.sqrt(variance); +} + +/** + * Calculate annualized volatility + */ +export function annualizedVolatility(returns: number[], periodsPerYear: number = 252): number { + return volatility(returns) * Math.sqrt(periodsPerYear); +} + +/** + * Calculate skewness (measure of asymmetry) + */ +export function skewness(returns: number[]): number { + if (returns.length < 3) return 0; + + const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret 
- mean, 2), 0) / returns.length; + const stdDev = Math.sqrt(variance); + + if (stdDev === 0) return 0; + + const skew = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 3), 0) / returns.length; + + return skew; +} + +/** + * Calculate kurtosis (measure of tail heaviness) + */ +export function kurtosis(returns: number[]): number { + if (returns.length < 4) return 0; + + const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length; + const stdDev = Math.sqrt(variance); + + if (stdDev === 0) return 0; + + const kurt = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 4), 0) / returns.length; + + return kurt - 3; // Excess kurtosis (subtract 3 for normal distribution baseline) +} + +/** + * Calculate comprehensive risk metrics + */ +export function calculateRiskMetrics( + returns: number[], + equityCurve: number[], + marketReturns?: number[], + riskFreeRate: number = 0 +): RiskMetrics { + if (returns.length === 0) { + return { + var95: 0, + var99: 0, + cvar95: 0, + maxDrawdown: 0, + volatility: 0, + downside_deviation: 0, + calmar_ratio: 0, + sortino_ratio: 0, + beta: 0, + alpha: 0, + sharpeRatio: 0, + treynorRatio: 0, + trackingError: 0, + informationRatio: 0 + }; + } + + const portfolioVolatility = volatility(returns); + const portfolioMean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length; + // Calculate VaR + const var95Value = valueAtRisk(returns, 0.95); + const var99Value = valueAtRisk(returns, 0.99); + const cvar95Value = conditionalValueAtRisk(returns, 0.95); + + // Calculate max drawdown + const maxDD = maxDrawdown(equityCurve); + + // Calculate downside deviation + const downsideDeviationValue = downsideDeviation(returns); + // Calculate ratios + const calmarRatio = maxDD > 0 ? portfolioMean / maxDD : 0; + const sortinoRatio = downsideDeviationValue > 0 ? 
(portfolioMean - riskFreeRate) / downsideDeviationValue : 0; + const sharpeRatio = portfolioVolatility > 0 ? (portfolioMean - riskFreeRate) / portfolioVolatility : 0; + + let portfolioBeta = 0; + let portfolioAlpha = 0; + let portfolioTreynorRatio = 0; + let portfolioTrackingError = 0; + let informationRatio = 0; + + if (marketReturns && marketReturns.length === returns.length) { + portfolioBeta = beta(returns, marketReturns); + portfolioAlpha = alpha(returns, marketReturns, riskFreeRate); + portfolioTreynorRatio = treynorRatio(returns, marketReturns, riskFreeRate); + portfolioTrackingError = trackingError(returns, marketReturns); + informationRatio = portfolioTrackingError > 0 ? portfolioAlpha / portfolioTrackingError : 0; + } + return { + var95: var95Value, + var99: var99Value, + cvar95: cvar95Value, + maxDrawdown: maxDD, + volatility: portfolioVolatility, + downside_deviation: downsideDeviationValue, + calmar_ratio: calmarRatio, + sortino_ratio: sortinoRatio, + beta: portfolioBeta, + alpha: portfolioAlpha, + sharpeRatio, + treynorRatio: portfolioTreynorRatio, + trackingError: portfolioTrackingError, + informationRatio + }; +} + +/** + * Helper function to get Z-score for confidence level + * This implementation handles arbitrary confidence levels + */ +function getZScore(confidenceLevel: number): number { + // First check our lookup table for common values (more precise) + const zScores: { [key: string]: number } = { + '0.90': 1.282, + '0.95': 1.645, + '0.975': 1.960, + '0.99': 2.326, + '0.995': 2.576 + }; + + const key = confidenceLevel.toString(); + if (zScores[key]) return zScores[key]; + + // For arbitrary confidence levels, use approximation + if (confidenceLevel < 0.5) return -getZScore(1 - confidenceLevel); + + if (confidenceLevel >= 0.999) return 3.09; // Cap at 99.9% for numerical stability + + // Approximation of inverse normal CDF + const y = Math.sqrt(-2.0 * Math.log(1.0 - confidenceLevel)); + return y - (2.515517 + 0.802853 * y + 0.010328 * y * y) / 
+ (1.0 + 1.432788 * y + 0.189269 * y * y + 0.001308 * y * y * y); +} + +/** + * Calculate portfolio risk contribution + */ +export function riskContribution( + weights: number[], + covarianceMatrix: number[][], + portfolioVolatility: number +): number[] { + const n = weights.length; + const contributions: number[] = []; + + for (let i = 0; i < n; i++) { + let marginalContribution = 0; + + for (let j = 0; j < n; j++) { + marginalContribution += weights[j] * covarianceMatrix[i][j]; + } + + const contribution = (weights[i] * marginalContribution) / Math.pow(portfolioVolatility, 2); + contributions.push(contribution); + } + + return contributions; +} + +/** + * Calculate Ulcer Index + */ +export function ulcerIndex(equityCurve: Array<{ value: number; date: Date }>): number { + let sumSquaredDrawdown = 0; + let peak = equityCurve[0].value; + + for (const point of equityCurve) { + peak = Math.max(peak, point.value); + const drawdownPercent = (peak - point.value) / peak * 100; + sumSquaredDrawdown += drawdownPercent * drawdownPercent; + } + + return Math.sqrt(sumSquaredDrawdown / equityCurve.length); +} + +/** + * Calculate risk-adjusted return (RAR) + */ +export function riskAdjustedReturn( + portfolioReturn: number, + portfolioRisk: number, + riskFreeRate: number = 0 +): number { + if (portfolioRisk === 0) return 0; + return (portfolioReturn - riskFreeRate) / portfolioRisk; +} diff --git a/libs/utils/src/calculations/technical-indicators.ts b/libs/utils/src/calculations/technical-indicators.ts index 47638f6..5fbb36a 100644 --- a/libs/utils/src/calculations/technical-indicators.ts +++ b/libs/utils/src/calculations/technical-indicators.ts @@ -1,2325 +1,2325 @@ -/** - * Technical Indicators - * Comprehensive set of technical analysis indicators - */ - -import { OHLCVData } from './index'; - -/** - * Simple Moving Average - */ -export function sma(values: number[], period: number): number[] { - if (period > values.length) return []; - - const result: number[] = []; - - for 
(let i = period - 1; i < values.length; i++) { - const sum = values.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0); - result.push(sum / period); - } - - return result; -} - -/** - * Exponential Moving Average - */ -export function ema(values: number[], period: number): number[] { - if (period > values.length) return []; - - const result: number[] = []; - const multiplier = 2 / (period + 1); - - // Start with SMA for first value - let ema = values.slice(0, period).reduce((a, b) => a + b, 0) / period; - result.push(ema); - - for (let i = period; i < values.length; i++) { - ema = (values[i] * multiplier) + (ema * (1 - multiplier)); - result.push(ema); - } - - return result; -} - -/** - * Relative Strength Index (RSI) - */ -export function rsi(prices: number[], period: number = 14): number[] { - if (period >= prices.length) return []; - - const gains: number[] = []; - const losses: number[] = []; - - // Calculate gains and losses - for (let i = 1; i < prices.length; i++) { - const change = prices[i] - prices[i - 1]; - gains.push(change > 0 ? change : 0); - losses.push(change < 0 ? 
Math.abs(change) : 0); - } - - const result: number[] = []; - - // Calculate RSI - for (let i = period - 1; i < gains.length; i++) { - const avgGain = gains.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0) / period; - const avgLoss = losses.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0) / period; - - if (avgLoss === 0) { - result.push(100); - } else { - const rs = avgGain / avgLoss; - const rsiValue = 100 - (100 / (1 + rs)); - result.push(rsiValue); - } - } - - return result; -} - -/** - * Moving Average Convergence Divergence (MACD) - */ -export function macd( - prices: number[], - fastPeriod: number = 12, - slowPeriod: number = 26, - signalPeriod: number = 9 -): { macd: number[], signal: number[], histogram: number[] } { - const fastEMA = ema(prices, fastPeriod); - const slowEMA = ema(prices, slowPeriod); - - const macdLine: number[] = []; - const startIndex = slowPeriod - fastPeriod; - - for (let i = 0; i < fastEMA.length - startIndex; i++) { - macdLine.push(fastEMA[i + startIndex] - slowEMA[i]); - } - - const signalLine = ema(macdLine, signalPeriod); - const histogram: number[] = []; - - const signalStartIndex = signalPeriod - 1; - for (let i = 0; i < signalLine.length; i++) { - histogram.push(macdLine[i + signalStartIndex] - signalLine[i]); - } - - return { - macd: macdLine, - signal: signalLine, - histogram: histogram - }; -} - -/** - * Bollinger Bands - */ -export function bollingerBands( - prices: number[], - period: number = 20, - standardDeviations: number = 2 -): { upper: number[], middle: number[], lower: number[] } { - const middle = sma(prices, period); - const upper: number[] = []; - const lower: number[] = []; - - for (let i = period - 1; i < prices.length; i++) { - const slice = prices.slice(i - period + 1, i + 1); - const mean = slice.reduce((a, b) => a + b, 0) / period; - const variance = slice.reduce((a, b) => a + Math.pow(b - mean, 2), 0) / period; - const stdDev = Math.sqrt(variance); - - const middleValue = middle[i - period + 
1]; - upper.push(middleValue + (standardDeviations * stdDev)); - lower.push(middleValue - (standardDeviations * stdDev)); - } - - return { upper, middle, lower }; -} - -/** - * Average True Range (ATR) - */ -export function atr(ohlcv: OHLCVData[], period: number = 14): number[] { - if (period >= ohlcv.length) return []; - - const trueRanges: number[] = []; - - for (let i = 1; i < ohlcv.length; i++) { - const high = ohlcv[i].high; - const low = ohlcv[i].low; - const prevClose = ohlcv[i - 1].close; - - const tr = Math.max( - high - low, - Math.abs(high - prevClose), - Math.abs(low - prevClose) - ); - - trueRanges.push(tr); - } - - return sma(trueRanges, period); -} - -/** - * Stochastic Oscillator - */ -export function stochastic( - ohlcv: OHLCVData[], - kPeriod: number = 14, - dPeriod: number = 3 -): { k: number[], d: number[] } { - if (kPeriod >= ohlcv.length) return { k: [], d: [] }; - - const kValues: number[] = []; - - for (let i = kPeriod - 1; i < ohlcv.length; i++) { - const slice = ohlcv.slice(i - kPeriod + 1, i + 1); - const highest = Math.max(...slice.map(d => d.high)); - const lowest = Math.min(...slice.map(d => d.low)); - const currentClose = ohlcv[i].close; - - if (highest === lowest) { - kValues.push(50); // Avoid division by zero - } else { - const kValue = ((currentClose - lowest) / (highest - lowest)) * 100; - kValues.push(kValue); - } - } - - const dValues = sma(kValues, dPeriod); - - return { k: kValues, d: dValues }; -} - -/** - * Williams %R - */ -export function williamsR(ohlcv: OHLCVData[], period: number = 14): number[] { - if (period >= ohlcv.length) return []; - - const result: number[] = []; - - for (let i = period - 1; i < ohlcv.length; i++) { - const slice = ohlcv.slice(i - period + 1, i + 1); - const highest = Math.max(...slice.map(d => d.high)); - const lowest = Math.min(...slice.map(d => d.low)); - const currentClose = ohlcv[i].close; - - if (highest === lowest) { - result.push(-50); // Avoid division by zero - } else { - const wrValue 
= ((highest - currentClose) / (highest - lowest)) * -100; - result.push(wrValue); - } - } - - return result; -} - -/** - * Commodity Channel Index (CCI) - */ -export function cci(ohlcv: OHLCVData[], period: number = 20): number[] { - if (period >= ohlcv.length) return []; - - const typicalPrices = ohlcv.map(d => (d.high + d.low + d.close) / 3); - const smaTP = sma(typicalPrices, period); - const result: number[] = []; - - for (let i = 0; i < smaTP.length; i++) { - const slice = typicalPrices.slice(i, i + period); - const mean = smaTP[i]; - const meanDeviation = slice.reduce((sum, value) => sum + Math.abs(value - mean), 0) / period; - - if (meanDeviation === 0) { - result.push(0); - } else { - const cciValue = (typicalPrices[i + period - 1] - mean) / (0.015 * meanDeviation); - result.push(cciValue); - } - } - - return result; -} - -/** - * Momentum - */ -export function momentum(prices: number[], period: number = 10): number[] { - if (period >= prices.length) return []; - - const result: number[] = []; - - for (let i = period; i < prices.length; i++) { - const momentum = prices[i] - prices[i - period]; - result.push(momentum); - } - - return result; -} - -/** - * Rate of Change (ROC) - */ -export function roc(prices: number[], period: number = 10): number[] { - if (period >= prices.length) return []; - - const result: number[] = []; - - for (let i = period; i < prices.length; i++) { - if (prices[i - period] === 0) { - result.push(0); - } else { - const rocValue = ((prices[i] - prices[i - period]) / prices[i - period]) * 100; - result.push(rocValue); - } - } - - return result; -} - -/** - * Money Flow Index (MFI) - */ -export function mfi(ohlcv: OHLCVData[], period: number = 14): number[] { - if (period >= ohlcv.length) return []; - - const typicalPrices = ohlcv.map(d => (d.high + d.low + d.close) / 3); - const moneyFlows = ohlcv.map((d, i) => typicalPrices[i] * d.volume); - - const result: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - let 
positiveFlow = 0; - let negativeFlow = 0; - - for (let j = i - period + 1; j <= i; j++) { - if (j > 0) { - if (typicalPrices[j] > typicalPrices[j - 1]) { - positiveFlow += moneyFlows[j]; - } else if (typicalPrices[j] < typicalPrices[j - 1]) { - negativeFlow += moneyFlows[j]; - } - } - } - - if (negativeFlow === 0) { - result.push(100); - } else { - const mfiRatio = positiveFlow / negativeFlow; - const mfiValue = 100 - (100 / (1 + mfiRatio)); - result.push(mfiValue); - } - } - - return result; -} - -/** - * On-Balance Volume (OBV) - */ -export function obv(ohlcv: OHLCVData[]): number[] { - if (ohlcv.length === 0) return []; - - const result: number[] = [ohlcv[0].volume]; - - for (let i = 1; i < ohlcv.length; i++) { - const prev = ohlcv[i - 1]; - const curr = ohlcv[i]; - - if (curr.close > prev.close) { - result.push(result[result.length - 1] + curr.volume); - } else if (curr.close < prev.close) { - result.push(result[result.length - 1] - curr.volume); - } else { - result.push(result[result.length - 1]); - } - } - - return result; -} - -/** - * Accumulation/Distribution Line - */ -export function accumulationDistribution(ohlcv: OHLCVData[]): number[] { - if (ohlcv.length === 0) return []; - - const result: number[] = []; - let adLine = 0; - - for (const candle of ohlcv) { - if (candle.high === candle.low) { - // Avoid division by zero - result.push(adLine); - continue; - } - - const moneyFlowMultiplier = ((candle.close - candle.low) - (candle.high - candle.close)) / (candle.high - candle.low); - const moneyFlowVolume = moneyFlowMultiplier * candle.volume; - adLine += moneyFlowVolume; - result.push(adLine); - } - - return result; -} - -/** - * Chaikin Money Flow (CMF) - */ -export function chaikinMoneyFlow(ohlcv: OHLCVData[], period: number = 20): number[] { - if (period >= ohlcv.length) return []; - - const adValues: number[] = []; - - for (const candle of ohlcv) { - if (candle.high === candle.low) { - adValues.push(0); - } else { - const moneyFlowMultiplier = 
((candle.close - candle.low) - (candle.high - candle.close)) / (candle.high - candle.low); - const moneyFlowVolume = moneyFlowMultiplier * candle.volume; - adValues.push(moneyFlowVolume); - } - } - - const result: number[] = []; - - for (let i = period - 1; i < ohlcv.length; i++) { - const sumAD = adValues.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0); - const sumVolume = ohlcv.slice(i - period + 1, i + 1).reduce((a, b) => a + b.volume, 0); - - if (sumVolume === 0) { - result.push(0); - } else { - result.push(sumAD / sumVolume); - } - } - - return result; -} - -/** - * Parabolic SAR - */ -export function parabolicSAR( - ohlcv: OHLCVData[], - step: number = 0.02, - maxStep: number = 0.2 -): number[] { - if (ohlcv.length < 2) return []; - - const result: number[] = []; - let trend = 1; // 1 for uptrend, -1 for downtrend - let acceleration = step; - let extremePoint = ohlcv[0].high; - let sar = ohlcv[0].low; - - result.push(sar); - - for (let i = 1; i < ohlcv.length; i++) { - const curr = ohlcv[i]; - const prev = ohlcv[i - 1]; - - // Calculate new SAR - sar = sar + acceleration * (extremePoint - sar); - - if (trend === 1) { // Uptrend - if (curr.low <= sar) { - // Trend reversal - trend = -1; - sar = extremePoint; - extremePoint = curr.low; - acceleration = step; - } else { - if (curr.high > extremePoint) { - extremePoint = curr.high; - acceleration = Math.min(acceleration + step, maxStep); - } - // Ensure SAR doesn't exceed previous lows - sar = Math.min(sar, prev.low, i > 1 ? ohlcv[i - 2].low : prev.low); - } - } else { // Downtrend - if (curr.high >= sar) { - // Trend reversal - trend = 1; - sar = extremePoint; - extremePoint = curr.high; - acceleration = step; - } else { - if (curr.low < extremePoint) { - extremePoint = curr.low; - acceleration = Math.min(acceleration + step, maxStep); - } - // Ensure SAR doesn't exceed previous highs - sar = Math.max(sar, prev.high, i > 1 ? 
ohlcv[i - 2].high : prev.high); - } - } - - result.push(sar); - } - - return result; -} - -/** - * Aroon Indicator - */ -export function aroon(ohlcv: OHLCVData[], period: number = 14): { up: number[], down: number[] } { - if (period >= ohlcv.length) return { up: [], down: [] }; - - const up: number[] = []; - const down: number[] = []; - - for (let i = period - 1; i < ohlcv.length; i++) { - const slice = ohlcv.slice(i - period + 1, i + 1); - - // Find highest high and lowest low positions - let highestIndex = 0; - let lowestIndex = 0; - - for (let j = 1; j < slice.length; j++) { - if (slice[j].high > slice[highestIndex].high) { - highestIndex = j; - } - if (slice[j].low < slice[lowestIndex].low) { - lowestIndex = j; - } - } - - const aroonUp = ((period - 1 - highestIndex) / (period - 1)) * 100; - const aroonDown = ((period - 1 - lowestIndex) / (period - 1)) * 100; - - up.push(aroonUp); - down.push(aroonDown); - } - - return { up, down }; -} - -/** - * Average Directional Movement Index (ADX) and Directional Movement Indicators (DMI) - */ -export function adx(ohlcv: OHLCVData[], period: number = 14): { adx: number[], plusDI: number[], minusDI: number[] } { - if (period >= ohlcv.length) return { adx: [], plusDI: [], minusDI: [] }; - - const trueRanges: number[] = []; - const plusDM: number[] = []; - const minusDM: number[] = []; - - // Calculate True Range and Directional Movements - for (let i = 1; i < ohlcv.length; i++) { - const current = ohlcv[i]; - const previous = ohlcv[i - 1]; - - // True Range - const tr = Math.max( - current.high - current.low, - Math.abs(current.high - previous.close), - Math.abs(current.low - previous.close) - ); - trueRanges.push(tr); - - // Directional Movements - const highDiff = current.high - previous.high; - const lowDiff = previous.low - current.low; - - const plusDMValue = (highDiff > lowDiff && highDiff > 0) ? highDiff : 0; - const minusDMValue = (lowDiff > highDiff && lowDiff > 0) ? 
lowDiff : 0; - - plusDM.push(plusDMValue); - minusDM.push(minusDMValue); - } - - // Calculate smoothed averages - const atrValues = sma(trueRanges, period); - const smoothedPlusDM = sma(plusDM, period); - const smoothedMinusDM = sma(minusDM, period); - - const plusDI: number[] = []; - const minusDI: number[] = []; - const dx: number[] = []; - - // Calculate DI+ and DI- - for (let i = 0; i < atrValues.length; i++) { - const diPlus = atrValues[i] > 0 ? (smoothedPlusDM[i] / atrValues[i]) * 100 : 0; - const diMinus = atrValues[i] > 0 ? (smoothedMinusDM[i] / atrValues[i]) * 100 : 0; - - plusDI.push(diPlus); - minusDI.push(diMinus); - - // Calculate DX - const diSum = diPlus + diMinus; - const dxValue = diSum > 0 ? (Math.abs(diPlus - diMinus) / diSum) * 100 : 0; - dx.push(dxValue); - } - - // Calculate ADX (smoothed DX) - const adxValues = sma(dx, period); - - return { - adx: adxValues, - plusDI: plusDI.slice(period - 1), - minusDI: minusDI.slice(period - 1) - }; -} - -/** - * Volume Weighted Moving Average (VWMA) - */ -export function vwma(ohlcv: OHLCVData[], period: number = 20): number[] { - if (period >= ohlcv.length) return []; - - const result: number[] = []; - - for (let i = period - 1; i < ohlcv.length; i++) { - const slice = ohlcv.slice(i - period + 1, i + 1); - - let totalVolumePrice = 0; - let totalVolume = 0; - - for (const candle of slice) { - const typicalPrice = (candle.high + candle.low + candle.close) / 3; - totalVolumePrice += typicalPrice * candle.volume; - totalVolume += candle.volume; - } - - const vwmaValue = totalVolume > 0 ? 
totalVolumePrice / totalVolume : 0; - result.push(vwmaValue); - } - - return result; -} - -/** - * Pivot Points (Standard) - */ -export function pivotPoints(ohlcv: OHLCVData[]): Array<{ - pivot: number; - resistance1: number; - resistance2: number; - resistance3: number; - support1: number; - support2: number; - support3: number; -}> { - if (ohlcv.length === 0) return []; - - const result: Array<{ - pivot: number; - resistance1: number; - resistance2: number; - resistance3: number; - support1: number; - support2: number; - support3: number; - }> = []; - - for (let i = 0; i < ohlcv.length; i++) { - const candle = ohlcv[i]; - - // Calculate pivot point - const pivot = (candle.high + candle.low + candle.close) / 3; - - // Calculate resistance and support levels - const resistance1 = (2 * pivot) - candle.low; - const support1 = (2 * pivot) - candle.high; - - const resistance2 = pivot + (candle.high - candle.low); - const support2 = pivot - (candle.high - candle.low); - - const resistance3 = candle.high + 2 * (pivot - candle.low); - const support3 = candle.low - 2 * (candle.high - pivot); - - result.push({ - pivot, - resistance1, - resistance2, - resistance3, - support1, - support2, - support3 - }); - } - - return result; -} - -/** - * Ichimoku Cloud - */ -export function ichimokuCloud( - ohlcv: OHLCVData[], - tenkanSenPeriod: number = 9, - kijunSenPeriod: number = 26, - senkouSpanBPeriod: number = 52 -): { - tenkanSen: number[]; - kijunSen: number[]; - senkouSpanA: number[]; - senkouSpanB: number[]; - chikouSpan: number[]; -} { - const { high, low, close } = { - high: ohlcv.map(item => item.high), - low: ohlcv.map(item => item.low), - close: ohlcv.map(item => item.close) - }; - - const tenkanSen = calculateTenkanSen(high, low, tenkanSenPeriod); - const kijunSen = calculateKijunSen(high, low, kijunSenPeriod); - const senkouSpanA = calculateSenkouSpanA(tenkanSen, kijunSen); - const senkouSpanB = calculateSenkouSpanB(high, low, senkouSpanBPeriod); - const chikouSpan = 
calculateChikouSpan(close, kijunSenPeriod); - - return { - tenkanSen, - kijunSen, - senkouSpanA, - senkouSpanB, - chikouSpan - }; - - function calculateTenkanSen(high: number[], low: number[], period: number): number[] { - const tenkanSen: number[] = []; - for (let i = period - 1; i < high.length; i++) { - const sliceHigh = high.slice(i - period + 1, i + 1); - const sliceLow = low.slice(i - period + 1, i + 1); - const highestHigh = Math.max(...sliceHigh); - const lowestLow = Math.min(...sliceLow); - tenkanSen.push((highestHigh + lowestLow) / 2); - } - return tenkanSen; - } - - function calculateKijunSen(high: number[], low: number[], period: number): number[] { - const kijunSen: number[] = []; - for (let i = period - 1; i < high.length; i++) { - const sliceHigh = high.slice(i - period + 1, i + 1); - const sliceLow = low.slice(i - period + 1, i + 1); - const highestHigh = Math.max(...sliceHigh); - const lowestLow = Math.min(...sliceLow); - kijunSen.push((highestHigh + lowestLow) / 2); - } - return kijunSen; - } - - function calculateSenkouSpanA(tenkanSen: number[], kijunSen: number[]): number[] { - const senkouSpanA: number[] = []; - for (let i = 0; i < tenkanSen.length; i++) { - senkouSpanA.push((tenkanSen[i] + kijunSen[i]) / 2); - } - return senkouSpanA; - } - - function calculateSenkouSpanB(high: number[], low: number[], period: number): number[] { - const senkouSpanB: number[] = []; - for (let i = period - 1; i < high.length; i++) { - const sliceHigh = high.slice(i - period + 1, i + 1); - const sliceLow = low.slice(i - period + 1, i + 1); - const highestHigh = Math.max(...sliceHigh); - const lowestLow = Math.min(...sliceLow); - senkouSpanB.push((highestHigh + lowestLow) / 2); - } - return senkouSpanB; - } - - function calculateChikouSpan(close: number[], period: number): number[] { - const chikouSpan: number[] = []; - for (let i = 0; i < close.length - period; i++) { - chikouSpan.push(close[i]); - } - return chikouSpan; - } -} - -/** - * Keltner Channels - */ 
-export function keltnerChannels( - ohlcv: OHLCVData[], - period: number = 20, - multiplier: number = 2 -): { - upper: number[]; - middle: number[]; - lower: number[]; -} { - const atrValues = atr(ohlcv, period); - const middle = sma(ohlcv.map(item => (item.high + item.low + item.close) / 3), period); - const upper: number[] = []; - const lower: number[] = []; - - for (let i = 0; i < middle.length; i++) { - upper.push(middle[i] + multiplier * atrValues[i]); - lower.push(middle[i] - multiplier * atrValues[i]); - } - - return { - upper, - middle, - lower - }; -} - -/** - * Donchian Channels - */ -export function donchianChannels( - ohlcv: OHLCVData[], - period: number = 20 -): { - upper: number[]; - middle: number[]; - lower: number[]; -} { - const upper: number[] = []; - const lower: number[] = []; - const middle: number[] = []; - - for (let i = period - 1; i < ohlcv.length; i++) { - const slice = ohlcv.slice(i - period + 1, i + 1); - const highestHigh = Math.max(...slice.map(item => item.high)); - const lowestLow = Math.min(...slice.map(item => item.low)); - - upper.push(highestHigh); - lower.push(lowestLow); - middle.push((highestHigh + lowestLow) / 2); - } - - return { - upper, - middle, - lower - }; -} - -/** - * Elder-Ray Index - */ -export function elderRay( - ohlcv: OHLCVData[], - period: number = 13 -): { - bullPower: number[]; - bearPower: number[]; -} { - const closePrices = ohlcv.map(item => item.close); - const emaValues = ema(closePrices, period); - const bullPower: number[] = []; - const bearPower: number[] = []; - - // Adjust the indexing to ensure we're matching the correct EMA value with each candle - for (let i = period - 1; i < ohlcv.length; i++) { - // Using the proper index for the EMA values which are aligned with closePrices - // Since ema() returns values starting from the period-th element - const emaIndex = i - (period - 1); - if (emaIndex >= 0 && emaIndex < emaValues.length) { - bullPower.push(ohlcv[i].high - emaValues[emaIndex]); - 
bearPower.push(ohlcv[i].low - emaValues[emaIndex]); - } - } - - return { - bullPower, - bearPower - }; -} - -/** - * Force Index - */ -export function forceIndex( - ohlcv: OHLCVData[], - period: number = 13 -): number[] { - const forceIndexValues: number[] = []; - - for (let i = 1; i < ohlcv.length; i++) { - const change = ohlcv[i].close - ohlcv[i - 1].close; - const volume = ohlcv[i].volume; - forceIndexValues.push(change * volume); - } - - const smaValues = sma(forceIndexValues, period); - return smaValues; -} - -/** - * Moving Average Envelope - */ -export function movingAverageEnvelope( - prices: number[], - period: number = 20, - percentage: number = 0.05 -): { - upper: number[]; - lower: number[]; - middle: number[]; -} { - const middle = sma(prices, period); - const upper: number[] = middle.map(value => value * (1 + percentage)); - const lower: number[] = middle.map(value => value * (1 - percentage)); - - return { - upper, - lower, - middle - }; -} - -/** - * High-Low Index - */ -export function highLowIndex( - ohlcv: OHLCVData[], - period: number = 14 -): number[] { - const highLowIndexValues: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - let newHighs = 0; - let newLows = 0; - - for (let j = i - period; j <= i; j++) { - if (ohlcv[j].close === Math.max(...ohlcv.slice(i - period, i + 1).map(item => item.close))) { - newHighs++; - } - if (ohlcv[j].close === Math.min(...ohlcv.slice(i - period, i + 1).map(item => item.close))) { - newLows++; - } - } - - highLowIndexValues.push(((newHighs - newLows) / (newHighs + newLows)) * 100); - } - - return highLowIndexValues; -} - -/** - * Coppock Curve - */ -export function coppockCurve( - prices: number[], - longPeriod: number = 14, - shortPeriod: number = 11, - weightedMovingAveragePeriod: number = 10 -): number[] { - const rocLong = roc(prices, longPeriod); - const rocShort = roc(prices, shortPeriod); - - const sumROC: number[] = rocLong.map((value, index) => value + rocShort[index]); - - return 
sma(sumROC, weightedMovingAveragePeriod); -} - -/** - * Ease of Movement (EMV) - */ -export function easeOfMovement( - ohlcv: OHLCVData[], - period: number = 14 -): number[] { - const emv: number[] = []; - - for (let i = 1; i < ohlcv.length; i++) { - const distance = ((ohlcv[i].high + ohlcv[i].low) / 2) - ((ohlcv[i - 1].high + ohlcv[i - 1].low) / 2); - const boxRatio = (ohlcv[i].volume / 100000000) / (ohlcv[i].high - ohlcv[i].low); // Scale volume to avoid very small numbers - - emv.push(distance / boxRatio); - } - - return sma(emv, period); -} - -/** - * Mass Index - */ -export function massIndex( - ohlcv: OHLCVData[], - period: number = 9, - emaPeriod: number = 25 -): number[] { - const singleEma: number[] = ema(ohlcv.map(item => item.high - item.low), emaPeriod); - const doubleEma: number[] = ema(singleEma, emaPeriod); - - const massIndexValues: number[] = []; - for (let i = period; i < doubleEma.length; i++) { - let sum = 0; - for (let j = i - period; j < i; j++) { - sum += singleEma[j] / doubleEma[j]; - } - massIndexValues.push(sum); - } - - return massIndexValues; -} - -/** - * Ultimate Oscillator - */ -export function ultimateOscillator( - ohlcv: OHLCVData[], - shortPeriod: number = 7, - mediumPeriod: number = 14, - longPeriod: number = 28 -): number[] { - const ultimateOscillatorValues: number[] = []; - - for (let i = longPeriod; i < ohlcv.length; i++) { - let trueRangeSum = 0; - let buyingPressureSum = 0; - - for (let j = i; j > 0 && j >= i - longPeriod; j--) { - const trueRange = Math.max( - ohlcv[j].high - ohlcv[j].low, - Math.abs(ohlcv[j].high - ohlcv[j - 1].close), - Math.abs(ohlcv[j].low - ohlcv[j - 1].close) - ); - - const buyingPressure = ohlcv[j].close - Math.min(ohlcv[j].low, ohlcv[j - 1].close); - - trueRangeSum += trueRange; - buyingPressureSum += buyingPressure; - } - - const ultimateOscillatorValue = (100 * ( - (4 * buyingPressureSum / trueRangeSum) + - (2 * buyingPressureSum / trueRangeSum) + - (buyingPressureSum / trueRangeSum) - ) / 7); - - 
ultimateOscillatorValues.push(ultimateOscillatorValue); - } - - return ultimateOscillatorValues; -} - -/** - * Schaff Trend Cycle (STC) - */ -export function schaffTrendCycle( - prices: number[], - period: number = 10, - fastMAPeriod: number = 23, - slowMAPeriod: number = 50 -): number[] { - const macdValues = macd(prices, fastMAPeriod, slowMAPeriod); - const maxValue = Math.max(...macdValues.macd); - const minValue = Math.min(...macdValues.macd); - - const kValues: number[] = macdValues.macd.map(value => (value - minValue) / (maxValue - minValue) * 100); - const dValues: number[] = sma(kValues, period); - - return dValues; -} - -/** - * Hilbert Transform - Instantaneous Trendline - */ -export function hilbertTransformInstantaneousTrendline( - prices: number[] -): number[] { - // This is a placeholder. A full Hilbert Transform implementation is complex. - // Requires significantly more code and signal processing knowledge. - // Returning a simple moving average as a substitute. - return sma(prices, 20); -} - -/** - * Relative Volatility Index (RVI) - */ -export function relativeVolatilityIndex( - ohlcv: OHLCVData[], - period: number = 14 -): number[] { - const rviValues: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - let highCloseSum = 0; - let lowCloseSum = 0; - - for (let j = i; j > 0 && j >= i - period; j--) { - highCloseSum += Math.pow(ohlcv[j].high - ohlcv[j].close, 2); - lowCloseSum += Math.pow(ohlcv[j].low - ohlcv[j].close, 2); - } - - const highCloseStdDev = Math.sqrt(highCloseSum / period); - const lowCloseStdDev = Math.sqrt(lowCloseSum / period); - - const rviValue = 100 * highCloseStdDev / (highCloseStdDev + lowCloseStdDev); - rviValues.push(rviValue); - } - - return rviValues; -} - -/** - * Chande Momentum Oscillator (CMO) - */ -export function chandeMomentumOscillator(prices: number[], period: number = 14): number[] { - const cmoValues: number[] = []; - - for (let i = period; i < prices.length; i++) { - let sumOfGains = 0; - let 
sumOfLosses = 0; - - for (let j = i; j > 0 && j >= i - period; j--) { - const change = prices[j] - prices[j - 1]; - if (change > 0) { - sumOfGains += change; - } else { - sumOfLosses += Math.abs(change); - } - } - - const cmoValue = 100 * (sumOfGains - sumOfLosses) / (sumOfGains + sumOfLosses); - cmoValues.push(cmoValue); - } - - return cmoValues; -} - -/** - * Detrended Price Oscillator (DPO) - */ -export function detrendedPriceOscillator(prices: number[], period: number = 20): number[] { - const dpoValues: number[] = []; - const smaValues = sma(prices, period); - - for (let i = period; i < prices.length; i++) { - const dpoValue = prices[i - Math.floor(period / 2) - 1] - smaValues[i - period]; - dpoValues.push(dpoValue); - } - - return dpoValues; -} - -/** - * Fractal Chaos Bands - */ -export function fractalChaosBands(ohlcv: OHLCVData[], period: number = 20): { upper: number[], lower: number[] } { - const upper: number[] = []; - const lower: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - const slice = ohlcv.slice(i - period + 1, i + 1); - const highestHigh = Math.max(...slice.map(item => item.high)); - const lowestLow = Math.min(...slice.map(item => item.low)); - - upper.push(highestHigh); - lower.push(lowestLow); - } - - return { - upper, - lower - }; -} - -/** - * Know Sure Thing (KST) Oscillator - */ -export function knowSureThing( - prices: number[], - rocPeriod1: number = 10, - rocPeriod2: number = 15, - rocPeriod3: number = 20, - rocPeriod4: number = 30, - smaPeriod1: number = 10, - smaPeriod2: number = 10, - smaPeriod3: number = 10, - smaPeriod4: number = 15 -): number[] { - const roc1 = roc(prices, rocPeriod1); - const roc2 = roc(prices, rocPeriod2); - const roc3 = roc(prices, rocPeriod3); - const roc4 = roc(prices, rocPeriod4); - - const sma1 = sma(roc1, smaPeriod1); - const sma2 = sma(roc2, smaPeriod2); - const sma3 = sma(roc3, smaPeriod3); - const sma4 = sma(roc4, smaPeriod4); - - const kstValues: number[] = []; - - for (let i = 0; 
i < sma1.length; i++) { - const kstValue = sma1[i] + sma2[i] + sma3[i] + sma4[i]; - kstValues.push(kstValue); - } - - return kstValues; -} - -/** - * Percentage Price Oscillator (PPO) - */ -export function percentagePriceOscillator( - prices: number[], - fastPeriod: number = 12, - slowPeriod: number = 26 -): number[] { - const fastEMA = ema(prices, fastPeriod); - const slowEMA = ema(prices, slowPeriod); - - const ppoValues: number[] = []; - - for (let i = 0; i < fastEMA.length; i++) { - const ppoValue = ((fastEMA[i] - slowEMA[i]) / slowEMA[i]) * 100; - ppoValues.push(ppoValue); - } - - return ppoValues; -} - -/** - * Price Volume Trend (PVT) - */ -export function priceVolumeTrend(ohlcv: OHLCVData[]): number[] { - const pvtValues: number[] = [0]; // Initialize with 0 - - for (let i = 1; i < ohlcv.length; i++) { - const change = (ohlcv[i].close - ohlcv[i - 1].close) / ohlcv[i - 1].close; - const pvtValue = pvtValues[i - 1] + (change * ohlcv[i].volume); - pvtValues.push(pvtValue); - } - - return pvtValues; -} - -/** - * Q Stick - */ -export function qStick(ohlcv: OHLCVData[], period: number = 10): number[] { - const qStickValues: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - let sum = 0; - for (let j = i; j > 0 && j >= i - period; j--) { - sum += ohlcv[j].close - ohlcv[j].open; - } - qStickValues.push(sum / period); - } - - return qStickValues; -} - -/** - * TRIX (Triple Exponentially Smoothed Average) - */ -export function trix(prices: number[], period: number = 18): number[] { - const ema1 = ema(prices, period); - const ema2 = ema(ema1, period); - const ema3 = ema(ema2, period); - - const trixValues: number[] = []; - - for (let i = 1; i < ema3.length; i++) { - const trixValue = ((ema3[i] - ema3[i - 1]) / ema3[i - 1]) * 100; - trixValues.push(trixValue); - } - - return trixValues; -} - -/** - * Vertical Horizontal Filter (VHF) - */ -export function verticalHorizontalFilter(ohlcv: OHLCVData[], period: number = 28): number[] { - const vhfValues: 
number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - const slice = ohlcv.slice(i - period + 1, i + 1); - const highestHigh = Math.max(...slice.map(item => item.high)); - const lowestLow = Math.min(...slice.map(item => item.low)); - const closeChanges: number[] = []; - - for (let j = 1; j < slice.length; j++) { - closeChanges.push(Math.abs(slice[j].close - slice[j - 1].close)); - } - - const sumOfCloseChanges = closeChanges.reduce((a, b) => a + b, 0); - const vhfValue = (highestHigh - lowestLow) / sumOfCloseChanges; - vhfValues.push(vhfValue); - } - - return vhfValues; -} - -/** - * Volume Rate of Change (VROC) - */ -export function volumeRateOfChange(ohlcv: OHLCVData[], period: number = 10): number[] { - const vrocValues: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - if (ohlcv[i - period].volume === 0) { - vrocValues.push(0); // Avoid division by zero - } else { - const vrocValue = ((ohlcv[i].volume - ohlcv[i - period].volume) / ohlcv[i - period].volume) * 100; - vrocValues.push(vrocValue); - } - } - - return vrocValues; -} - -/** - * Average True Range Trailing Stops - * Calculates trailing stop levels based on ATR - */ -export function atrTrailingStops( - ohlcv: OHLCVData[], - period: number = 14, - multiplier: number = 3 -): { - longStop: number[]; - shortStop: number[]; -} { - const atrValues = atr(ohlcv, period); - const longStop: number[] = []; - const shortStop: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - longStop.push(ohlcv[i].low - multiplier * atrValues[i - period]); - shortStop.push(ohlcv[i].high + multiplier * atrValues[i - period]); - } - - return { - longStop, - shortStop - }; -} - -/** - * Elder's Force Index - * Measures the strength of a trend by combining price and volume - */ -export function eldersForceIndex( - ohlcv: OHLCVData[], - period: number = 13 -): number[] { - const forceIndexValues: number[] = []; - - for (let i = 1; i < ohlcv.length; i++) { - const change = ohlcv[i].close - 
ohlcv[i - 1].close; - const volume = ohlcv[i].volume; - forceIndexValues.push(change * volume); - } - - return ema(forceIndexValues, period); -} - -/** - * Ultimate Oscillator - */ -export function trueStrengthIndex( - prices: number[], - longPeriod: number = 25, - shortPeriod: number = 13, - signalPeriod: number = 9 -): number[] { - const priceChanges: number[] = []; - for (let i = 1; i < prices.length; i++) { - priceChanges.push(prices[i] - prices[i - 1]); - } - - const smoothedMomentum = ema(priceChanges, shortPeriod); - const doubleSmoothedMomentum = ema(smoothedMomentum, longPeriod); - - const absoluteMomentum = priceChanges.map(Math.abs); - const smoothedAbsoluteMomentum = ema(absoluteMomentum, shortPeriod); - const doubleSmoothedAbsoluteMomentum = ema(smoothedAbsoluteMomentum, longPeriod); - - const tsiValues: number[] = []; - for (let i = longPeriod; i < prices.length - 1; i++) { - tsiValues.push( - (doubleSmoothedMomentum[i - longPeriod] / doubleSmoothedAbsoluteMomentum[i - longPeriod]) * 100 - ); - } - - return tsiValues; -} - -/** - * Money Flow Multiplier - * Calculates the Money Flow Multiplier - */ -export function moneyFlowMultiplier(ohlcv: OHLCVData[]): number[] { - return ohlcv.map(candle => ((candle.close - candle.low) - (candle.high - candle.close)) / (candle.high - candle.low)); -} - -/** - * Positive Volume Index (PVI) - */ -export function positiveVolumeIndex(ohlcv: OHLCVData[], initialValue: number = 1000): number[] { - const pviValues: number[] = [initialValue]; - - for (let i = 1; i < ohlcv.length; i++) { - if (ohlcv[i].volume > ohlcv[i - 1].volume) { - const change = (ohlcv[i].close - ohlcv[i - 1].close) / ohlcv[i - 1].close; - pviValues.push(pviValues[i - 1] + (pviValues[i - 1] * change)); - } else { - pviValues.push(pviValues[i - 1]); - } - } - - return pviValues; -} - -/** - * Negative Volume Index (NVI) - */ -export function negativeVolumeIndex(ohlcv: OHLCVData[], initialValue: number = 1000): number[] { - const nviValues: number[] = 
[initialValue]; - - for (let i = 1; i < ohlcv.length; i++) { - if (ohlcv[i].volume < ohlcv[i - 1].volume) { - const change = (ohlcv[i].close - ohlcv[i - 1].close) / ohlcv[i - 1].close; - nviValues.push(nviValues[i - 1] + (nviValues[i - 1] * change)); - } else { - nviValues.push(nviValues[i - 1]); - } - } - - return nviValues; -} - -/** - * Typical Price - * Calculates the typical price for each period - */ -export function typicalPrice(ohlcv: OHLCVData[]): number[] { - return ohlcv.map(candle => (candle.high + candle.low + candle.close) / 3); -} - -/** - * Median Price - * Calculates the median price for each period - */ -export function medianPrice(ohlcv: OHLCVData[]): number[] { - return ohlcv.map(candle => (candle.high + candle.low) / 2); -} - -/** - * On Balance Volume Mean (OBV Mean) - * Calculates the mean of the On Balance Volume (OBV) values. - */ -export function onBalanceVolumeMean(ohlcv: OHLCVData[], period: number = 14): number[] { - const obvValues = obv(ohlcv); - return sma(obvValues, period); -} - -/** - * Kaufman's Adaptive Moving Average (KAMA) - */ -export function kama(prices: number[], period: number = 10, fastPeriod: number = 2, slowPeriod: number = 30): number[] { - const kamaValues: number[] = []; - - if (prices.length <= period) { - return kamaValues; - } - - // Calculate the initial KAMA using SMA - const firstSMA = prices.slice(0, period).reduce((sum, price) => sum + price, 0) / period; - let kama = firstSMA; - kamaValues.push(kama); - - // Constants for the calculation - const fastConst = 2 / (fastPeriod + 1); - const slowConst = 2 / (slowPeriod + 1); - - for (let i = period; i < prices.length; i++) { - // Calculate direction - the numerator of the efficiency ratio - const direction = Math.abs(prices[i] - prices[i - period]); - - // Calculate volatility - the denominator of the efficiency ratio - let volatility = 0; - for (let j = i - period + 1; j <= i; j++) { - volatility += Math.abs(prices[j] - prices[j - 1]); - } - - // Calculate 
efficiency ratio (ER) - // Handle the case where volatility is zero to avoid division by zero - const er = volatility === 0 ? 1 : Math.min(direction / volatility, 1); - - // Calculate smoothing constant (SC) - const sc = Math.pow(er * (fastConst - slowConst) + slowConst, 2); - - // Calculate KAMA - kama = kama + sc * (prices[i] - kama); - kamaValues.push(kama); - } - - return kamaValues; -} - -/** - * DeMarker - */ -export function deMarker(ohlcv: OHLCVData[], period: number = 14): number[] { - const deMax: number[] = []; - const deMin: number[] = []; - - for (let i = 1; i < ohlcv.length; i++) { - deMax.push(ohlcv[i].high > ohlcv[i - 1].high ? ohlcv[i].high - ohlcv[i - 1].high : 0); - deMin.push(ohlcv[i].low < ohlcv[i - 1].low ? ohlcv[i - 1].low - ohlcv[i].low : 0); - } - - const sumDeMax = sma(deMax, period); - const sumDeMin = sma(deMin, period); - - const deMarkerValues: number[] = []; - for (let i = period; i < ohlcv.length; i++) { - deMarkerValues.push(sumDeMax[i - period] / (sumDeMax[i - period] + sumDeMin[i - period])); - } - - return deMarkerValues; -} - -/** - * Elder's SafeZone Stops - */ -export function eldersSafeZoneStops(ohlcv: OHLCVData[], atrPeriod: number = 20, percentageRisk: number = 2): { longStop: number[], shortStop: number[] } { - const atrValues = atr(ohlcv, atrPeriod); - const longStop: number[] = []; - const shortStop: number[] = []; - - for (let i = atrPeriod; i < ohlcv.length; i++) { - longStop.push(ohlcv[i].low - (atrValues[i - atrPeriod] * (percentageRisk / 100))); - shortStop.push(ohlcv[i].high + (atrValues[i - atrPeriod] * (percentageRisk / 100))); - } - - return { - longStop, - shortStop - }; -} - -/** - * Projection Oscillator - */ -export function projectionOscillator(ohlcv: OHLCVData[], period: number = 14): number[] { - const projectionOscillatorValues: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - let highestHigh = ohlcv[i - period].high; - let lowestLow = ohlcv[i - period].low; - - for (let j = i - period; 
j < i; j++) { - if (ohlcv[j].high > highestHigh) { - highestHigh = ohlcv[j].high; - } - if (ohlcv[j].low < lowestLow) { - lowestLow = ohlcv[j].low; - } - } - - const projectionOscillatorValue = ((ohlcv[i].close - lowestLow) / (highestHigh - lowestLow)) * 100; - projectionOscillatorValues.push(projectionOscillatorValue); - } - - return projectionOscillatorValues; -} - -/** - * Twiggs Money Flow - */ -export function twiggsMoneyFlow(ohlcv: OHLCVData[]): number[] { - const twiggsMoneyFlowValues: number[] = []; - - for (let i = 0; i < ohlcv.length; i++) { - const moneyFlowVolume = ohlcv[i].volume * (((ohlcv[i].close - ohlcv[i].low) - (ohlcv[i].high - ohlcv[i].close)) / (ohlcv[i].high - ohlcv[i].low)); - twiggsMoneyFlowValues.push(moneyFlowVolume); - } - - return twiggsMoneyFlowValues; -} - - -/** - * Relative Strength - * Compares the performance of one asset to another - */ -export function relativeStrength(prices1: number[], prices2: number[], period: number = 14): number[] { - const rsValues: number[] = []; - const sma1 = sma(prices1, period); - const sma2 = sma(prices2, period); - - for (let i = 0; i < sma1.length; i++) { - rsValues.push(sma1[i] / sma2[i]); - } - - return rsValues; -} - -/** - * Correlation Coefficient - * Measures the statistical relationship between two assets - */ -export function correlationCoefficient(prices1: number[], prices2: number[], period: number = 14): number[] { - const correlationValues: number[] = []; - - for (let i = period; i < prices1.length; i++) { - const slice1 = prices1.slice(i - period, i); - const slice2 = prices2.slice(i - period, i); - - const mean1 = slice1.reduce((a, b) => a + b, 0) / period; - const mean2 = slice2.reduce((a, b) => a + b, 0) / period; - - let sumXY = 0; - let sumX2 = 0; - let sumY2 = 0; - - for (let j = 0; j < period; j++) { - sumXY += (slice1[j] - mean1) * (slice2[j] - mean2); - sumX2 += Math.pow(slice1[j] - mean1, 2); - sumY2 += Math.pow(slice2[j] - mean2, 2); - } - - const correlation = sumXY / 
(Math.sqrt(sumX2) * Math.sqrt(sumY2)); - correlationValues.push(correlation); - } - - return correlationValues; -} - -/** - * Coppock Range - * Calculates the range between high and low Coppock values - */ -export function coppockRange(prices: number[], longPeriod: number = 14, shortPeriod: number = 11, wmaPeriod: number = 10): { high: number[], low: number[] } { - const coppockValues = coppockCurve(prices, longPeriod, shortPeriod, wmaPeriod); - const highValues: number[] = []; - const lowValues: number[] = []; - - for (let i = 1; i < coppockValues.length; i++) { - highValues.push(Math.max(coppockValues[i], coppockValues[i - 1])); - lowValues.push(Math.min(coppockValues[i], coppockValues[i - 1])); - } - - return { - high: highValues, - low: lowValues - }; -} - -/** - * Chaikin Oscillator - * Calculates the difference between two moving averages of the Accumulation/Distribution Line - */ -export function chaikinOscillator(ohlcv: OHLCVData[], fastPeriod: number = 3, slowPeriod: number = 10): number[] { - const adlValues = accumulationDistribution(ohlcv); - const fastMA = ema(adlValues, fastPeriod); - const slowMA = ema(adlValues, slowPeriod); - - const chaikinOscillatorValues: number[] = []; - for (let i = 0; i < fastMA.length; i++) { - chaikinOscillatorValues.push(fastMA[i] - slowMA[i]); - } - - return chaikinOscillatorValues; -} - -/** - * Prime Number Oscillator - * Uses prime numbers to create an oscillator - */ -export function primeNumberOscillator(prices: number[], period: number = 14): number[] { - const primeNumbers = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43]; // First 14 prime numbers - const pnoValues: number[] = []; - - for (let i = period; i < prices.length; i++) { - let sum = 0; - for (let j = 0; j < period; j++) { - sum += prices[i - j] * primeNumbers[j]; - } - pnoValues.push(sum); - } - - return pnoValues; -} - -/** - * Fractal Efficiency - * Measures the efficiency of price movement based on fractal dimension - */ -export function 
fractalEfficiency(ohlcv: OHLCVData[], period: number = 20): number[] { - const fractalEfficiencyValues: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - let netDistance = 0; - for (let j = i; j > i - period; j--) { - netDistance += Math.sqrt(Math.pow(ohlcv[j].close - ohlcv[j - 1].close, 2)); - } - - const straightLineDistance = Math.sqrt(Math.pow(ohlcv[i].close - ohlcv[i - period].close, 2)); - const fractalEfficiencyValue = straightLineDistance / netDistance; - fractalEfficiencyValues.push(fractalEfficiencyValue); - } - - return fractalEfficiencyValues; -} - -/** - * Market Facilitation Index (MFI) - */ -export function marketFacilitationIndex(ohlcv: OHLCVData[]): number[] { - const mfiValues: number[] = []; - - for (let i = 0; i < ohlcv.length; i++) { - const range = ohlcv[i].high - ohlcv[i].low; - const mfiValue = range / ohlcv[i].volume; - mfiValues.push(mfiValue); - } - - return mfiValues; -} - -/** - * Elder-Disk - * Combination of Elder-Ray and Force Index - */ -export function elderDisk(ohlcv: OHLCVData[], period: number = 13): number[] { - const { bullPower, bearPower } = elderRay(ohlcv, period); - const forceIndexValues = forceIndex(ohlcv, period); - - const elderDiskValues: number[] = []; - for (let i = 0; i < bullPower.length; i++) { - elderDiskValues.push(bullPower[i] + bearPower[i] + forceIndexValues[i]); - } - - return elderDiskValues; -} - -/** - * Relative Vigor Index (RVI) - */ -export function relativeVigorIndex(ohlcv: OHLCVData[], period: number = 10): number[] { - const rviValues: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - let sumNumerator = 0; - let sumDenominator = 0; - - for (let j = i; j > i - period; j--) { - sumNumerator += (ohlcv[j].close - ohlcv[j].open) * (ohlcv[j].high - ohlcv[j].low); - sumDenominator += (ohlcv[j].high - ohlcv[j].low) * (ohlcv[j].high - ohlcv[j].low); - } - - const rviValue = sumDenominator !== 0 ? 
sumNumerator / sumDenominator : 0; - rviValues.push(rviValue); - } - - return rviValues; -} - -/** - * Balance of Power (BOP) - */ -export function balanceOfPower(ohlcv: OHLCVData[]): number[] { - const bopValues: number[] = []; - - for (let i = 0; i < ohlcv.length; i++) { - const range = ohlcv[i].high - ohlcv[i].low; - const bopValue = range !== 0 ? (ohlcv[i].close - ohlcv[i].open) / range : 0; - bopValues.push(bopValue); - } - - return bopValues; -} - -/** - * Stochastic RSI - * Combines Stochastic Oscillator and RSI to provide overbought/oversold signals - */ -export function stochasticRSI( - prices: number[], - rsiPeriod: number = 14, - stochasticPeriod: number = 14, - smoothPeriod: number = 3 -): { k: number[]; d: number[] } { - const rsiValues = rsi(prices, rsiPeriod); - return stochastic( - rsiValues.map(rsi => ({ high: rsi, low: rsi, close: rsi, open: rsi, volume: 0 } as OHLCVData)), - stochasticPeriod, - smoothPeriod - ); -} - -/** - * StochRSI Fast - */ -export function stochRSIFast( - prices: number[], - rsiPeriod: number = 14, - stochasticPeriod: number = 14 -): { k: number[]; d: number[] } { - const rsiValues = rsi(prices, rsiPeriod); - return stochastic( - rsiValues.map(rsi => ({ high: rsi, low: rsi, close: rsi, open: rsi, volume: 0 } as OHLCVData)), - stochasticPeriod, - 1 - ); -} - -/** - * StochRSI Full - */ -export function stochRSIFull( - prices: number[], - rsiPeriod: number = 14, - stochasticPeriod: number = 14, - kSmoothPeriod: number = 3, - dSmoothPeriod: number = 3 -): { k: number[]; d: number[] } { - const rsiValues = rsi(prices, rsiPeriod); - const { k } = stochastic( - rsiValues.map(rsi => ({ high: rsi, low: rsi, close: rsi, open: rsi, volume: 0 } as OHLCVData)), - stochasticPeriod, - kSmoothPeriod - ); - const d = sma(k, dSmoothPeriod); - return { k, d }; -} - -/** - * Normalized Average True Range (NATR) - */ -export function normalizedAverageTrueRange(ohlcv: OHLCVData[], period: number = 14): number[] { - const atrValues = atr(ohlcv, 
period); - const natrValues: number[] = []; - - for (let i = 0; i < atrValues.length; i++) { - natrValues.push((atrValues[i] / ohlcv[i].close) * 100); - } - - return natrValues; -} - -/** - * Pretty Good Oscillator (PGO) - */ -export function prettyGoodOscillator(ohlcv: OHLCVData[], period: number = 14): number[] { - const pgoValues: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - let sumHighLow = 0; - let sumCloseOpen = 0; - - for (let j = i; j > i - period; j--) { - sumHighLow += ohlcv[j].high - ohlcv[j].low; - sumCloseOpen += ohlcv[j].close - ohlcv[j].open; - } - - const pgoValue = sumHighLow !== 0 ? sumCloseOpen / sumHighLow : 0; - pgoValues.push(pgoValue); - } - - return pgoValues; -} - -/** - * Intraday Intensity Index (III) - */ -export function intradayIntensityIndex(ohlcv: OHLCVData[]): number[] { - const iiiValues: number[] = []; - - for (let i = 0; i < ohlcv.length; i++) { - const volume = ohlcv[i].volume; - const range = ohlcv[i].high - ohlcv[i].low; - const iiiValue = range !== 0 ? 
((2 * ohlcv[i].close - ohlcv[i].high - ohlcv[i].low) / range) * volume : 0; - iiiValues.push(iiiValue); - } - - return iiiValues; -} - -/** - * Money Flow Chaikin A/D Oscillator - * Uses the Chaikin A/D line to create an oscillator - */ -export function moneyFlowChaikinOscillator(ohlcv: OHLCVData[], fastPeriod: number = 3, slowPeriod: number = 10): number[] { - const adlValues = accumulationDistribution(ohlcv); - const fastMA = ema(adlValues, fastPeriod); - const slowMA = ema(adlValues, slowPeriod); - - const moneyFlowChaikinOscillatorValues: number[] = []; - for (let i = 0; i < fastMA.length; i++) { - moneyFlowChaikinOscillatorValues.push(fastMA[i] - slowMA[i]); - } - - return moneyFlowChaikinOscillatorValues; -} - -/** - * Elder's Thermometer - * Uses high and low prices to gauge market temperature - */ -export function eldersThermometer(ohlcv: OHLCVData[], period: number = 20): number[] { - const eldersThermometerValues: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - let sumOfHighs = 0; - let sumOfLows = 0; - - for (let j = i; j > i - period; j--) { - sumOfHighs += ohlcv[j].high; - sumOfLows += ohlcv[j].low; - } - - const averageHigh = sumOfHighs / period; - const averageLow = sumOfLows / period; - const thermometerValue = averageHigh - averageLow; - eldersThermometerValues.push(thermometerValue); - } - - return eldersThermometerValues; -} - -/** - * High-Low Range - * Calculates the range between high and low prices - */ -export function highLowRange(ohlcv: OHLCVData[]): number[] { - return ohlcv.map(candle => candle.high - candle.low); -} - -/** - * Typical Price Range - * Calculates the range of typical prices - */ -export function typicalPriceRange(ohlcv: OHLCVData[]): number[] { - const typicalPrices = typicalPrice(ohlcv); - const typicalPriceRangeValues: number[] = []; - - for (let i = 1; i < typicalPrices.length; i++) { - typicalPriceRangeValues.push(typicalPrices[i] - typicalPrices[i - 1]); - } - - return typicalPriceRangeValues; -} - 
-/** - * Median Price Range - * Calculates the range of median prices - */ -export function medianPriceRange(ohlcv: OHLCVData[]): number[] { - const medianPrices = medianPrice(ohlcv); - const medianPriceRangeValues: number[] = []; - - for (let i = 1; i < medianPrices.length; i++) { - medianPriceRangeValues.push(medianPrices[i] - medianPrices[i - 1]); - } - - return medianPriceRangeValues; -} - -/** - * Center of Gravity - */ -export function centerOfGravity(prices: number[], period: number = 10): number[] { - const cogValues: number[] = []; - - for (let i = period; i < prices.length; i++) { - let weightedSum = 0; - let sumOfWeights = 0; - - for (let j = 1; j <= period; j++) { - weightedSum += j * prices[i - period + j]; - sumOfWeights += j; - } - - const cogValue = weightedSum / sumOfWeights; - cogValues.push(cogValue); - } - - return cogValues; -} - -/** - * Linear Regression Indicator - */ -export function linearRegressionIndicator(prices: number[], period: number = 14): number[] { - const lriValues: number[] = []; - - if (prices.length < period) { - return lriValues; - } - - for (let i = period; i < prices.length; i++) { - const slice = prices.slice(i - period, i); - - // Calculate means for normalization (increases numerical stability) - const meanX = (period + 1) / 2; // Mean of 1,2,3,...,period - let meanY = 0; - for (let j = 0; j < period; j++) { - meanY += slice[j]; - } - meanY /= period; - - // Calculate covariance and variance with normalized data - let covariance = 0; - let variance = 0; - - for (let j = 0; j < period; j++) { - const xDiff = (j + 1) - meanX; - const yDiff = slice[j] - meanY; - - covariance += xDiff * yDiff; - variance += xDiff * xDiff; - } - - // Avoid division by zero - const slope = variance !== 0 ? 
covariance / variance : 0; - const intercept = meanY - slope * meanX; - - // Calculate the predicted value at the end of the period - const lriValue = slope * period + intercept; - lriValues.push(lriValue); - } - - return lriValues; -} - -/** - * Standard Deviation - * Calculates the standard deviation of a set of values - */ -export function standardDeviation(prices: number[], period: number = 20): number[] { - const stdDevValues: number[] = []; - const smaValues = sma(prices, period); - - for (let i = period - 1; i < prices.length; i++) { - const slice = prices.slice(i - period + 1, i + 1); - const mean = smaValues[i - period + 1]; - let sumOfSquaredDifferences = 0; - - for (const price of slice) { - sumOfSquaredDifferences += Math.pow(price - mean, 2); - } - - const variance = sumOfSquaredDifferences / period; - const stdDevValue = Math.sqrt(variance); - stdDevValues.push(stdDevValue); - } - - return stdDevValues; -} - -/** - * Chaikin A/D Range - * Calculates the range of the Chaikin A/D line - */ -export function chaikinADRange(ohlcv: OHLCVData[]): number[] { - const adValues = accumulationDistribution(ohlcv); - const adRangeValues: number[] = []; - - for (let i = 1; i < adValues.length; i++) { - adRangeValues.push(adValues[i] - adValues[i - 1]); - } - - return adRangeValues; -} - -/** - * Volume Oscillator - * Compares two moving averages of volume - */ -export function volumeOscillator(ohlcv: OHLCVData[], fastPeriod: number = 5, slowPeriod: number = 10): number[] { - const volumes = ohlcv.map(candle => candle.volume); - const fastMA = sma(volumes, fastPeriod); - const slowMA = sma(volumes, slowPeriod); - - const volumeOscillatorValues: number[] = []; - for (let i = 0; i < fastMA.length; i++) { - volumeOscillatorValues.push((fastMA[i] - slowMA[i]) / slowMA[i] * 100); - } - - return volumeOscillatorValues; -} - -/** - * Money Flow Index Range - * Calculates the range of the Money Flow Index - */ -export function moneyFlowIndexRange(ohlcv: OHLCVData[], period: 
number = 14): number[] { - const mfiValues = mfi(ohlcv, period); - const mfiRangeValues: number[] = []; - - for (let i = 1; i < mfiValues.length; i++) { - mfiRangeValues.push(mfiValues[i] - mfiValues[i - 1]); - } - - return mfiRangeValues; -} - -/** - * On Balance Volume Oscillator - * Calculates the oscillator of the On Balance Volume - */ -export function onBalanceVolumeOscillator(ohlcv: OHLCVData[], fastPeriod: number = 5, slowPeriod: number = 10): number[] { - const obvValues = obv(ohlcv); - const fastMA = sma(obvValues, fastPeriod); - const slowMA = sma(obvValues, slowPeriod); - - const obvOscillatorValues: number[] = []; - for (let i = 0; i < fastMA.length; i++) { - obvOscillatorValues.push((fastMA[i] - slowMA[i]) / slowMA[i] * 100); - } - - return obvOscillatorValues; -} - -/** - * Klinger Oscillator - */ -export function klingerOscillator(ohlcv: OHLCVData[], fastPeriod: number = 34, slowPeriod: number = 55): number[] { - if (ohlcv.length < 2) { - return []; - } - - // Calculate volume force - const volumeForce: number[] = []; - - for (let i = 1; i < ohlcv.length; i++) { - const current = ohlcv[i]; - const previous = ohlcv[i - 1]; - - // Calculate typical prices - const typicalPriceCurrent = (current.high + current.low + current.close) / 3; - const typicalPricePrevious = (previous.high + previous.low + previous.close) / 3; - - // Determine trend - const trend = typicalPriceCurrent > typicalPricePrevious ? 
1 : -1; - - // Calculate volume force - const force = trend * ohlcv[i].volume * Math.abs(typicalPriceCurrent - typicalPricePrevious); - volumeForce.push(force); - } - - // Calculate fast and slow EMAs of the volume force - const fastEMA = ema(volumeForce, fastPeriod); - const slowEMA = ema(volumeForce, slowPeriod); - - // Calculate Klinger Oscillator - const klingerOscillatorValues: number[] = []; - - // Both EMAs should have the same starting point - const startIndex = Math.abs(fastEMA.length - slowEMA.length); - const shorterEMA = fastEMA.length < slowEMA.length ? fastEMA : slowEMA; - const longerEMA = fastEMA.length < slowEMA.length ? slowEMA : fastEMA; - - for (let i = 0; i < shorterEMA.length; i++) { - if (fastEMA.length < slowEMA.length) { - klingerOscillatorValues.push(shorterEMA[i] - longerEMA[i + startIndex]); - } else { - klingerOscillatorValues.push(longerEMA[i + startIndex] - shorterEMA[i]); - } - } - - return klingerOscillatorValues; -} - -/** - * Directional Movement Index (DMI) - */ -export function directionalMovementIndex(ohlcv: OHLCVData[], period: number = 14): { plusDI: number[], minusDI: number[] } { - const { plusDI, minusDI } = adx(ohlcv, period); - return { plusDI, minusDI }; -} - -/** - * Elder's Cloud - */ -export function eldersCloud(ohlcv: OHLCVData[], period: number = 20): { upper: number[], lower: number[] } { - const emaValues = ema(ohlcv.map(item => item.close), period); - const atrValues = atr(ohlcv, period); - const upper: number[] = []; - const lower: number[] = []; - - for (let i = 0; i < emaValues.length; i++) { - upper.push(emaValues[i] + atrValues[i]); - lower.push(emaValues[i] - atrValues[i]); - } - - return { - upper, - lower - }; -} - -/** - * Ultimate Moving Average (UMA) - */ -export function ultimateMovingAverage(prices: number[], fastPeriod: number = 7, mediumPeriod: number = 14, slowPeriod: number = 28): number[] { - const fastMA = sma(prices, fastPeriod); - const mediumMA = sma(prices, mediumPeriod); - const slowMA = 
sma(prices, slowPeriod); - - const umaValues: number[] = []; - for (let i = 0; i < fastMA.length; i++) { - umaValues.push((fastMA[i] + mediumMA[i] + slowMA[i]) / 3); - } - - return umaValues; -} - -/** - * Rainbow Oscillator - */ -export function rainbowOscillator(prices: number[], numberOfMAs: number = 7, periodIncrement: number = 5): number[] { - const maValues: number[][] = []; - for (let i = 1; i <= numberOfMAs; i++) { - maValues.push(sma(prices, i * periodIncrement)); - } - - const rainbowOscillatorValues: number[] = []; - for (let i = 0; i < maValues[0].length; i++) { - let sum = 0; - for (let j = 0; j < numberOfMAs; j++) { - sum += maValues[j][i]; - } - rainbowOscillatorValues.push(sum / numberOfMAs); - } - - return rainbowOscillatorValues; -} - -/** - * Guppy Multiple Moving Average (GMMA) - */ -export function guppyMultipleMovingAverage(prices: number[], shortTermPeriods: number[] = [3, 5, 8, 10, 12, 15], longTermPeriods: number[] = [30, 35, 40, 45, 50, 60]): { shortTermMAs: number[][], longTermMAs: number[][] } { - const shortTermMAs: number[][] = []; - const longTermMAs: number[][] = []; - - for (const period of shortTermPeriods) { - shortTermMAs.push(sma(prices, period)); - } - - for (const period of longTermPeriods) { - longTermMAs.push(sma(prices, period)); - } - - return { shortTermMAs, longTermMAs }; -} - -/** - * Historical Volatility - */ -export function historicalVolatility(prices: number[], period: number = 20): number[] { - const logReturns: number[] = []; - for (let i = 1; i < prices.length; i++) { - logReturns.push(Math.log(prices[i] / prices[i - 1])); - } - - const stdDevs = standardDeviation(logReturns, period); - const historicalVolatilityValues: number[] = []; - - for (const stdDev of stdDevs) { - historicalVolatilityValues.push(stdDev * Math.sqrt(252)); // Annualize - } - - return historicalVolatilityValues; -} - -/** - * Donchian Width - */ -export function donchianWidth(ohlcv: OHLCVData[], period: number = 20): number[] { - const { 
upper, lower } = donchianChannels(ohlcv, period); - const donchianWidthValues: number[] = []; - - for (let i = 0; i < upper.length; i++) { - donchianWidthValues.push(upper[i] - lower[i]); - } - - return donchianWidthValues; -} - -/** - * Chandelier Exit - */ -export function chandelierExit(ohlcv: OHLCVData[], period: number = 22, multiplier: number = 3): { long: number[], short: number[] } { - const atrValues = atr(ohlcv, period); - const long: number[] = []; - const short: number[] = []; - - for (let i = period; i < ohlcv.length; i++) { - const slice = ohlcv.slice(i - period, i); - const highestHigh = Math.max(...slice.map(item => item.high)); - const lowestLow = Math.min(...slice.map(item => item.low)); - - long.push(highestHigh - multiplier * atrValues[i - period]); - short.push(lowestLow + multiplier * atrValues[i - period]); - } - - return { long, short }; -} - -/** - * Projection Bands - */ -export function projectionBands(ohlcv: OHLCVData[], period: number = 14, stdDevMultiplier: number = 2): { upper: number[], lower: number[] } { - const projectionOscillatorValues = projectionOscillator(ohlcv, period); - const stdDevValues = standardDeviation(projectionOscillatorValues, period); - const upper: number[] = []; - const lower: number[] = []; - - for (let i = 0; i < projectionOscillatorValues.length; i++) { - upper.push(projectionOscillatorValues[i] + stdDevMultiplier * stdDevValues[i]); - lower.push(projectionOscillatorValues[i] - stdDevMultiplier * stdDevValues[i]); - } - - return { upper, lower }; -} - -/** - * Range Action Verification Index (RAVI) - */ -export function rangeActionVerificationIndex(prices: number[], longPeriod: number = 65, shortPeriod: number = 10): number[] { - const longMA = sma(prices, longPeriod); - const shortMA = sma(prices, shortPeriod); - - const raviValues: number[] = []; - for (let i = 0; i < longMA.length; i++) { - raviValues.push((shortMA[i] - longMA[i]) / longMA[i] * 100); - } - - return raviValues; -} - -/** - * Momentum from 
Current Price - * Calculates momentum using the current price and a previous price. Reduces lag compared to using moving averages. - */ -export function momentumFromCurrentPrice(prices: number[], period: number = 10): number[] { - const result: number[] = []; - - for (let i = period; i < prices.length; i++) { - const momentum = prices[i] - prices[i - period]; - result.push(momentum); - } - - return result; -} - -/** - * Rate of Change from Current Price (ROC) - * Calculates ROC using the current price. - */ -export function rocFromCurrentPrice(prices: number[], period: number = 10): number[] { - const result: number[] = []; - - for (let i = period; i < prices.length; i++) { - if (prices[i - period] === 0) { - result.push(0); - } else { - const rocValue = ((prices[i] - prices[i - period]) / prices[i - period]) * 100; - result.push(rocValue); - } - } - - return result; +/** + * Technical Indicators + * Comprehensive set of technical analysis indicators + */ + +import { OHLCVData } from './index'; + +/** + * Simple Moving Average + */ +export function sma(values: number[], period: number): number[] { + if (period > values.length) return []; + + const result: number[] = []; + + for (let i = period - 1; i < values.length; i++) { + const sum = values.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0); + result.push(sum / period); + } + + return result; +} + +/** + * Exponential Moving Average + */ +export function ema(values: number[], period: number): number[] { + if (period > values.length) return []; + + const result: number[] = []; + const multiplier = 2 / (period + 1); + + // Start with SMA for first value + let ema = values.slice(0, period).reduce((a, b) => a + b, 0) / period; + result.push(ema); + + for (let i = period; i < values.length; i++) { + ema = (values[i] * multiplier) + (ema * (1 - multiplier)); + result.push(ema); + } + + return result; +} + +/** + * Relative Strength Index (RSI) + */ +export function rsi(prices: number[], period: number = 14): 
number[] { + if (period >= prices.length) return []; + + const gains: number[] = []; + const losses: number[] = []; + + // Calculate gains and losses + for (let i = 1; i < prices.length; i++) { + const change = prices[i] - prices[i - 1]; + gains.push(change > 0 ? change : 0); + losses.push(change < 0 ? Math.abs(change) : 0); + } + + const result: number[] = []; + + // Calculate RSI + for (let i = period - 1; i < gains.length; i++) { + const avgGain = gains.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0) / period; + const avgLoss = losses.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0) / period; + + if (avgLoss === 0) { + result.push(100); + } else { + const rs = avgGain / avgLoss; + const rsiValue = 100 - (100 / (1 + rs)); + result.push(rsiValue); + } + } + + return result; +} + +/** + * Moving Average Convergence Divergence (MACD) + */ +export function macd( + prices: number[], + fastPeriod: number = 12, + slowPeriod: number = 26, + signalPeriod: number = 9 +): { macd: number[], signal: number[], histogram: number[] } { + const fastEMA = ema(prices, fastPeriod); + const slowEMA = ema(prices, slowPeriod); + + const macdLine: number[] = []; + const startIndex = slowPeriod - fastPeriod; + + for (let i = 0; i < fastEMA.length - startIndex; i++) { + macdLine.push(fastEMA[i + startIndex] - slowEMA[i]); + } + + const signalLine = ema(macdLine, signalPeriod); + const histogram: number[] = []; + + const signalStartIndex = signalPeriod - 1; + for (let i = 0; i < signalLine.length; i++) { + histogram.push(macdLine[i + signalStartIndex] - signalLine[i]); + } + + return { + macd: macdLine, + signal: signalLine, + histogram: histogram + }; +} + +/** + * Bollinger Bands + */ +export function bollingerBands( + prices: number[], + period: number = 20, + standardDeviations: number = 2 +): { upper: number[], middle: number[], lower: number[] } { + const middle = sma(prices, period); + const upper: number[] = []; + const lower: number[] = []; + + for (let i = period - 
1; i < prices.length; i++) { + const slice = prices.slice(i - period + 1, i + 1); + const mean = slice.reduce((a, b) => a + b, 0) / period; + const variance = slice.reduce((a, b) => a + Math.pow(b - mean, 2), 0) / period; + const stdDev = Math.sqrt(variance); + + const middleValue = middle[i - period + 1]; + upper.push(middleValue + (standardDeviations * stdDev)); + lower.push(middleValue - (standardDeviations * stdDev)); + } + + return { upper, middle, lower }; +} + +/** + * Average True Range (ATR) + */ +export function atr(ohlcv: OHLCVData[], period: number = 14): number[] { + if (period >= ohlcv.length) return []; + + const trueRanges: number[] = []; + + for (let i = 1; i < ohlcv.length; i++) { + const high = ohlcv[i].high; + const low = ohlcv[i].low; + const prevClose = ohlcv[i - 1].close; + + const tr = Math.max( + high - low, + Math.abs(high - prevClose), + Math.abs(low - prevClose) + ); + + trueRanges.push(tr); + } + + return sma(trueRanges, period); +} + +/** + * Stochastic Oscillator + */ +export function stochastic( + ohlcv: OHLCVData[], + kPeriod: number = 14, + dPeriod: number = 3 +): { k: number[], d: number[] } { + if (kPeriod >= ohlcv.length) return { k: [], d: [] }; + + const kValues: number[] = []; + + for (let i = kPeriod - 1; i < ohlcv.length; i++) { + const slice = ohlcv.slice(i - kPeriod + 1, i + 1); + const highest = Math.max(...slice.map(d => d.high)); + const lowest = Math.min(...slice.map(d => d.low)); + const currentClose = ohlcv[i].close; + + if (highest === lowest) { + kValues.push(50); // Avoid division by zero + } else { + const kValue = ((currentClose - lowest) / (highest - lowest)) * 100; + kValues.push(kValue); + } + } + + const dValues = sma(kValues, dPeriod); + + return { k: kValues, d: dValues }; +} + +/** + * Williams %R + */ +export function williamsR(ohlcv: OHLCVData[], period: number = 14): number[] { + if (period >= ohlcv.length) return []; + + const result: number[] = []; + + for (let i = period - 1; i < ohlcv.length; i++) 
{ + const slice = ohlcv.slice(i - period + 1, i + 1); + const highest = Math.max(...slice.map(d => d.high)); + const lowest = Math.min(...slice.map(d => d.low)); + const currentClose = ohlcv[i].close; + + if (highest === lowest) { + result.push(-50); // Avoid division by zero + } else { + const wrValue = ((highest - currentClose) / (highest - lowest)) * -100; + result.push(wrValue); + } + } + + return result; +} + +/** + * Commodity Channel Index (CCI) + */ +export function cci(ohlcv: OHLCVData[], period: number = 20): number[] { + if (period >= ohlcv.length) return []; + + const typicalPrices = ohlcv.map(d => (d.high + d.low + d.close) / 3); + const smaTP = sma(typicalPrices, period); + const result: number[] = []; + + for (let i = 0; i < smaTP.length; i++) { + const slice = typicalPrices.slice(i, i + period); + const mean = smaTP[i]; + const meanDeviation = slice.reduce((sum, value) => sum + Math.abs(value - mean), 0) / period; + + if (meanDeviation === 0) { + result.push(0); + } else { + const cciValue = (typicalPrices[i + period - 1] - mean) / (0.015 * meanDeviation); + result.push(cciValue); + } + } + + return result; +} + +/** + * Momentum + */ +export function momentum(prices: number[], period: number = 10): number[] { + if (period >= prices.length) return []; + + const result: number[] = []; + + for (let i = period; i < prices.length; i++) { + const momentum = prices[i] - prices[i - period]; + result.push(momentum); + } + + return result; +} + +/** + * Rate of Change (ROC) + */ +export function roc(prices: number[], period: number = 10): number[] { + if (period >= prices.length) return []; + + const result: number[] = []; + + for (let i = period; i < prices.length; i++) { + if (prices[i - period] === 0) { + result.push(0); + } else { + const rocValue = ((prices[i] - prices[i - period]) / prices[i - period]) * 100; + result.push(rocValue); + } + } + + return result; +} + +/** + * Money Flow Index (MFI) + */ +export function mfi(ohlcv: OHLCVData[], period: 
number = 14): number[] { + if (period >= ohlcv.length) return []; + + const typicalPrices = ohlcv.map(d => (d.high + d.low + d.close) / 3); + const moneyFlows = ohlcv.map((d, i) => typicalPrices[i] * d.volume); + + const result: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + let positiveFlow = 0; + let negativeFlow = 0; + + for (let j = i - period + 1; j <= i; j++) { + if (j > 0) { + if (typicalPrices[j] > typicalPrices[j - 1]) { + positiveFlow += moneyFlows[j]; + } else if (typicalPrices[j] < typicalPrices[j - 1]) { + negativeFlow += moneyFlows[j]; + } + } + } + + if (negativeFlow === 0) { + result.push(100); + } else { + const mfiRatio = positiveFlow / negativeFlow; + const mfiValue = 100 - (100 / (1 + mfiRatio)); + result.push(mfiValue); + } + } + + return result; +} + +/** + * On-Balance Volume (OBV) + */ +export function obv(ohlcv: OHLCVData[]): number[] { + if (ohlcv.length === 0) return []; + + const result: number[] = [ohlcv[0].volume]; + + for (let i = 1; i < ohlcv.length; i++) { + const prev = ohlcv[i - 1]; + const curr = ohlcv[i]; + + if (curr.close > prev.close) { + result.push(result[result.length - 1] + curr.volume); + } else if (curr.close < prev.close) { + result.push(result[result.length - 1] - curr.volume); + } else { + result.push(result[result.length - 1]); + } + } + + return result; +} + +/** + * Accumulation/Distribution Line + */ +export function accumulationDistribution(ohlcv: OHLCVData[]): number[] { + if (ohlcv.length === 0) return []; + + const result: number[] = []; + let adLine = 0; + + for (const candle of ohlcv) { + if (candle.high === candle.low) { + // Avoid division by zero + result.push(adLine); + continue; + } + + const moneyFlowMultiplier = ((candle.close - candle.low) - (candle.high - candle.close)) / (candle.high - candle.low); + const moneyFlowVolume = moneyFlowMultiplier * candle.volume; + adLine += moneyFlowVolume; + result.push(adLine); + } + + return result; +} + +/** + * Chaikin Money Flow (CMF) + */ 
+export function chaikinMoneyFlow(ohlcv: OHLCVData[], period: number = 20): number[] { + if (period >= ohlcv.length) return []; + + const adValues: number[] = []; + + for (const candle of ohlcv) { + if (candle.high === candle.low) { + adValues.push(0); + } else { + const moneyFlowMultiplier = ((candle.close - candle.low) - (candle.high - candle.close)) / (candle.high - candle.low); + const moneyFlowVolume = moneyFlowMultiplier * candle.volume; + adValues.push(moneyFlowVolume); + } + } + + const result: number[] = []; + + for (let i = period - 1; i < ohlcv.length; i++) { + const sumAD = adValues.slice(i - period + 1, i + 1).reduce((a, b) => a + b, 0); + const sumVolume = ohlcv.slice(i - period + 1, i + 1).reduce((a, b) => a + b.volume, 0); + + if (sumVolume === 0) { + result.push(0); + } else { + result.push(sumAD / sumVolume); + } + } + + return result; +} + +/** + * Parabolic SAR + */ +export function parabolicSAR( + ohlcv: OHLCVData[], + step: number = 0.02, + maxStep: number = 0.2 +): number[] { + if (ohlcv.length < 2) return []; + + const result: number[] = []; + let trend = 1; // 1 for uptrend, -1 for downtrend + let acceleration = step; + let extremePoint = ohlcv[0].high; + let sar = ohlcv[0].low; + + result.push(sar); + + for (let i = 1; i < ohlcv.length; i++) { + const curr = ohlcv[i]; + const prev = ohlcv[i - 1]; + + // Calculate new SAR + sar = sar + acceleration * (extremePoint - sar); + + if (trend === 1) { // Uptrend + if (curr.low <= sar) { + // Trend reversal + trend = -1; + sar = extremePoint; + extremePoint = curr.low; + acceleration = step; + } else { + if (curr.high > extremePoint) { + extremePoint = curr.high; + acceleration = Math.min(acceleration + step, maxStep); + } + // Ensure SAR doesn't exceed previous lows + sar = Math.min(sar, prev.low, i > 1 ? 
ohlcv[i - 2].low : prev.low); + } + } else { // Downtrend + if (curr.high >= sar) { + // Trend reversal + trend = 1; + sar = extremePoint; + extremePoint = curr.high; + acceleration = step; + } else { + if (curr.low < extremePoint) { + extremePoint = curr.low; + acceleration = Math.min(acceleration + step, maxStep); + } + // Ensure SAR doesn't exceed previous highs + sar = Math.max(sar, prev.high, i > 1 ? ohlcv[i - 2].high : prev.high); + } + } + + result.push(sar); + } + + return result; +} + +/** + * Aroon Indicator + */ +export function aroon(ohlcv: OHLCVData[], period: number = 14): { up: number[], down: number[] } { + if (period >= ohlcv.length) return { up: [], down: [] }; + + const up: number[] = []; + const down: number[] = []; + + for (let i = period - 1; i < ohlcv.length; i++) { + const slice = ohlcv.slice(i - period + 1, i + 1); + + // Find highest high and lowest low positions + let highestIndex = 0; + let lowestIndex = 0; + + for (let j = 1; j < slice.length; j++) { + if (slice[j].high > slice[highestIndex].high) { + highestIndex = j; + } + if (slice[j].low < slice[lowestIndex].low) { + lowestIndex = j; + } + } + + const aroonUp = ((period - 1 - highestIndex) / (period - 1)) * 100; + const aroonDown = ((period - 1 - lowestIndex) / (period - 1)) * 100; + + up.push(aroonUp); + down.push(aroonDown); + } + + return { up, down }; +} + +/** + * Average Directional Movement Index (ADX) and Directional Movement Indicators (DMI) + */ +export function adx(ohlcv: OHLCVData[], period: number = 14): { adx: number[], plusDI: number[], minusDI: number[] } { + if (period >= ohlcv.length) return { adx: [], plusDI: [], minusDI: [] }; + + const trueRanges: number[] = []; + const plusDM: number[] = []; + const minusDM: number[] = []; + + // Calculate True Range and Directional Movements + for (let i = 1; i < ohlcv.length; i++) { + const current = ohlcv[i]; + const previous = ohlcv[i - 1]; + + // True Range + const tr = Math.max( + current.high - current.low, + 
Math.abs(current.high - previous.close), + Math.abs(current.low - previous.close) + ); + trueRanges.push(tr); + + // Directional Movements + const highDiff = current.high - previous.high; + const lowDiff = previous.low - current.low; + + const plusDMValue = (highDiff > lowDiff && highDiff > 0) ? highDiff : 0; + const minusDMValue = (lowDiff > highDiff && lowDiff > 0) ? lowDiff : 0; + + plusDM.push(plusDMValue); + minusDM.push(minusDMValue); + } + + // Calculate smoothed averages + const atrValues = sma(trueRanges, period); + const smoothedPlusDM = sma(plusDM, period); + const smoothedMinusDM = sma(minusDM, period); + + const plusDI: number[] = []; + const minusDI: number[] = []; + const dx: number[] = []; + + // Calculate DI+ and DI- + for (let i = 0; i < atrValues.length; i++) { + const diPlus = atrValues[i] > 0 ? (smoothedPlusDM[i] / atrValues[i]) * 100 : 0; + const diMinus = atrValues[i] > 0 ? (smoothedMinusDM[i] / atrValues[i]) * 100 : 0; + + plusDI.push(diPlus); + minusDI.push(diMinus); + + // Calculate DX + const diSum = diPlus + diMinus; + const dxValue = diSum > 0 ? (Math.abs(diPlus - diMinus) / diSum) * 100 : 0; + dx.push(dxValue); + } + + // Calculate ADX (smoothed DX) + const adxValues = sma(dx, period); + + return { + adx: adxValues, + plusDI: plusDI.slice(period - 1), + minusDI: minusDI.slice(period - 1) + }; +} + +/** + * Volume Weighted Moving Average (VWMA) + */ +export function vwma(ohlcv: OHLCVData[], period: number = 20): number[] { + if (period >= ohlcv.length) return []; + + const result: number[] = []; + + for (let i = period - 1; i < ohlcv.length; i++) { + const slice = ohlcv.slice(i - period + 1, i + 1); + + let totalVolumePrice = 0; + let totalVolume = 0; + + for (const candle of slice) { + const typicalPrice = (candle.high + candle.low + candle.close) / 3; + totalVolumePrice += typicalPrice * candle.volume; + totalVolume += candle.volume; + } + + const vwmaValue = totalVolume > 0 ? 
totalVolumePrice / totalVolume : 0; + result.push(vwmaValue); + } + + return result; +} + +/** + * Pivot Points (Standard) + */ +export function pivotPoints(ohlcv: OHLCVData[]): Array<{ + pivot: number; + resistance1: number; + resistance2: number; + resistance3: number; + support1: number; + support2: number; + support3: number; +}> { + if (ohlcv.length === 0) return []; + + const result: Array<{ + pivot: number; + resistance1: number; + resistance2: number; + resistance3: number; + support1: number; + support2: number; + support3: number; + }> = []; + + for (let i = 0; i < ohlcv.length; i++) { + const candle = ohlcv[i]; + + // Calculate pivot point + const pivot = (candle.high + candle.low + candle.close) / 3; + + // Calculate resistance and support levels + const resistance1 = (2 * pivot) - candle.low; + const support1 = (2 * pivot) - candle.high; + + const resistance2 = pivot + (candle.high - candle.low); + const support2 = pivot - (candle.high - candle.low); + + const resistance3 = candle.high + 2 * (pivot - candle.low); + const support3 = candle.low - 2 * (candle.high - pivot); + + result.push({ + pivot, + resistance1, + resistance2, + resistance3, + support1, + support2, + support3 + }); + } + + return result; +} + +/** + * Ichimoku Cloud + */ +export function ichimokuCloud( + ohlcv: OHLCVData[], + tenkanSenPeriod: number = 9, + kijunSenPeriod: number = 26, + senkouSpanBPeriod: number = 52 +): { + tenkanSen: number[]; + kijunSen: number[]; + senkouSpanA: number[]; + senkouSpanB: number[]; + chikouSpan: number[]; +} { + const { high, low, close } = { + high: ohlcv.map(item => item.high), + low: ohlcv.map(item => item.low), + close: ohlcv.map(item => item.close) + }; + + const tenkanSen = calculateTenkanSen(high, low, tenkanSenPeriod); + const kijunSen = calculateKijunSen(high, low, kijunSenPeriod); + const senkouSpanA = calculateSenkouSpanA(tenkanSen, kijunSen); + const senkouSpanB = calculateSenkouSpanB(high, low, senkouSpanBPeriod); + const chikouSpan = 
calculateChikouSpan(close, kijunSenPeriod); + + return { + tenkanSen, + kijunSen, + senkouSpanA, + senkouSpanB, + chikouSpan + }; + + function calculateTenkanSen(high: number[], low: number[], period: number): number[] { + const tenkanSen: number[] = []; + for (let i = period - 1; i < high.length; i++) { + const sliceHigh = high.slice(i - period + 1, i + 1); + const sliceLow = low.slice(i - period + 1, i + 1); + const highestHigh = Math.max(...sliceHigh); + const lowestLow = Math.min(...sliceLow); + tenkanSen.push((highestHigh + lowestLow) / 2); + } + return tenkanSen; + } + + function calculateKijunSen(high: number[], low: number[], period: number): number[] { + const kijunSen: number[] = []; + for (let i = period - 1; i < high.length; i++) { + const sliceHigh = high.slice(i - period + 1, i + 1); + const sliceLow = low.slice(i - period + 1, i + 1); + const highestHigh = Math.max(...sliceHigh); + const lowestLow = Math.min(...sliceLow); + kijunSen.push((highestHigh + lowestLow) / 2); + } + return kijunSen; + } + + function calculateSenkouSpanA(tenkanSen: number[], kijunSen: number[]): number[] { + const senkouSpanA: number[] = []; + for (let i = 0; i < tenkanSen.length; i++) { + senkouSpanA.push((tenkanSen[i] + kijunSen[i]) / 2); + } + return senkouSpanA; + } + + function calculateSenkouSpanB(high: number[], low: number[], period: number): number[] { + const senkouSpanB: number[] = []; + for (let i = period - 1; i < high.length; i++) { + const sliceHigh = high.slice(i - period + 1, i + 1); + const sliceLow = low.slice(i - period + 1, i + 1); + const highestHigh = Math.max(...sliceHigh); + const lowestLow = Math.min(...sliceLow); + senkouSpanB.push((highestHigh + lowestLow) / 2); + } + return senkouSpanB; + } + + function calculateChikouSpan(close: number[], period: number): number[] { + const chikouSpan: number[] = []; + for (let i = 0; i < close.length - period; i++) { + chikouSpan.push(close[i]); + } + return chikouSpan; + } +} + +/** + * Keltner Channels + */ 
+export function keltnerChannels( + ohlcv: OHLCVData[], + period: number = 20, + multiplier: number = 2 +): { + upper: number[]; + middle: number[]; + lower: number[]; +} { + const atrValues = atr(ohlcv, period); + const middle = sma(ohlcv.map(item => (item.high + item.low + item.close) / 3), period); + const upper: number[] = []; + const lower: number[] = []; + + for (let i = 0; i < middle.length; i++) { + upper.push(middle[i] + multiplier * atrValues[i]); + lower.push(middle[i] - multiplier * atrValues[i]); + } + + return { + upper, + middle, + lower + }; +} + +/** + * Donchian Channels + */ +export function donchianChannels( + ohlcv: OHLCVData[], + period: number = 20 +): { + upper: number[]; + middle: number[]; + lower: number[]; +} { + const upper: number[] = []; + const lower: number[] = []; + const middle: number[] = []; + + for (let i = period - 1; i < ohlcv.length; i++) { + const slice = ohlcv.slice(i - period + 1, i + 1); + const highestHigh = Math.max(...slice.map(item => item.high)); + const lowestLow = Math.min(...slice.map(item => item.low)); + + upper.push(highestHigh); + lower.push(lowestLow); + middle.push((highestHigh + lowestLow) / 2); + } + + return { + upper, + middle, + lower + }; +} + +/** + * Elder-Ray Index + */ +export function elderRay( + ohlcv: OHLCVData[], + period: number = 13 +): { + bullPower: number[]; + bearPower: number[]; +} { + const closePrices = ohlcv.map(item => item.close); + const emaValues = ema(closePrices, period); + const bullPower: number[] = []; + const bearPower: number[] = []; + + // Adjust the indexing to ensure we're matching the correct EMA value with each candle + for (let i = period - 1; i < ohlcv.length; i++) { + // Using the proper index for the EMA values which are aligned with closePrices + // Since ema() returns values starting from the period-th element + const emaIndex = i - (period - 1); + if (emaIndex >= 0 && emaIndex < emaValues.length) { + bullPower.push(ohlcv[i].high - emaValues[emaIndex]); + 
bearPower.push(ohlcv[i].low - emaValues[emaIndex]); + } + } + + return { + bullPower, + bearPower + }; +} + +/** + * Force Index + */ +export function forceIndex( + ohlcv: OHLCVData[], + period: number = 13 +): number[] { + const forceIndexValues: number[] = []; + + for (let i = 1; i < ohlcv.length; i++) { + const change = ohlcv[i].close - ohlcv[i - 1].close; + const volume = ohlcv[i].volume; + forceIndexValues.push(change * volume); + } + + const smaValues = sma(forceIndexValues, period); + return smaValues; +} + +/** + * Moving Average Envelope + */ +export function movingAverageEnvelope( + prices: number[], + period: number = 20, + percentage: number = 0.05 +): { + upper: number[]; + lower: number[]; + middle: number[]; +} { + const middle = sma(prices, period); + const upper: number[] = middle.map(value => value * (1 + percentage)); + const lower: number[] = middle.map(value => value * (1 - percentage)); + + return { + upper, + lower, + middle + }; +} + +/** + * High-Low Index + */ +export function highLowIndex( + ohlcv: OHLCVData[], + period: number = 14 +): number[] { + const highLowIndexValues: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + let newHighs = 0; + let newLows = 0; + + for (let j = i - period; j <= i; j++) { + if (ohlcv[j].close === Math.max(...ohlcv.slice(i - period, i + 1).map(item => item.close))) { + newHighs++; + } + if (ohlcv[j].close === Math.min(...ohlcv.slice(i - period, i + 1).map(item => item.close))) { + newLows++; + } + } + + highLowIndexValues.push(((newHighs - newLows) / (newHighs + newLows)) * 100); + } + + return highLowIndexValues; +} + +/** + * Coppock Curve + */ +export function coppockCurve( + prices: number[], + longPeriod: number = 14, + shortPeriod: number = 11, + weightedMovingAveragePeriod: number = 10 +): number[] { + const rocLong = roc(prices, longPeriod); + const rocShort = roc(prices, shortPeriod); + + const sumROC: number[] = rocLong.map((value, index) => value + rocShort[index]); + + return 
sma(sumROC, weightedMovingAveragePeriod); +} + +/** + * Ease of Movement (EMV) + */ +export function easeOfMovement( + ohlcv: OHLCVData[], + period: number = 14 +): number[] { + const emv: number[] = []; + + for (let i = 1; i < ohlcv.length; i++) { + const distance = ((ohlcv[i].high + ohlcv[i].low) / 2) - ((ohlcv[i - 1].high + ohlcv[i - 1].low) / 2); + const boxRatio = (ohlcv[i].volume / 100000000) / (ohlcv[i].high - ohlcv[i].low); // Scale volume to avoid very small numbers + + emv.push(distance / boxRatio); + } + + return sma(emv, period); +} + +/** + * Mass Index + */ +export function massIndex( + ohlcv: OHLCVData[], + period: number = 9, + emaPeriod: number = 25 +): number[] { + const singleEma: number[] = ema(ohlcv.map(item => item.high - item.low), emaPeriod); + const doubleEma: number[] = ema(singleEma, emaPeriod); + + const massIndexValues: number[] = []; + for (let i = period; i < doubleEma.length; i++) { + let sum = 0; + for (let j = i - period; j < i; j++) { + sum += singleEma[j] / doubleEma[j]; + } + massIndexValues.push(sum); + } + + return massIndexValues; +} + +/** + * Ultimate Oscillator + */ +export function ultimateOscillator( + ohlcv: OHLCVData[], + shortPeriod: number = 7, + mediumPeriod: number = 14, + longPeriod: number = 28 +): number[] { + const ultimateOscillatorValues: number[] = []; + + for (let i = longPeriod; i < ohlcv.length; i++) { + let trueRangeSum = 0; + let buyingPressureSum = 0; + + for (let j = i; j > 0 && j >= i - longPeriod; j--) { + const trueRange = Math.max( + ohlcv[j].high - ohlcv[j].low, + Math.abs(ohlcv[j].high - ohlcv[j - 1].close), + Math.abs(ohlcv[j].low - ohlcv[j - 1].close) + ); + + const buyingPressure = ohlcv[j].close - Math.min(ohlcv[j].low, ohlcv[j - 1].close); + + trueRangeSum += trueRange; + buyingPressureSum += buyingPressure; + } + + const ultimateOscillatorValue = (100 * ( + (4 * buyingPressureSum / trueRangeSum) + + (2 * buyingPressureSum / trueRangeSum) + + (buyingPressureSum / trueRangeSum) + ) / 7); + + 
ultimateOscillatorValues.push(ultimateOscillatorValue); + } + + return ultimateOscillatorValues; +} + +/** + * Schaff Trend Cycle (STC) + */ +export function schaffTrendCycle( + prices: number[], + period: number = 10, + fastMAPeriod: number = 23, + slowMAPeriod: number = 50 +): number[] { + const macdValues = macd(prices, fastMAPeriod, slowMAPeriod); + const maxValue = Math.max(...macdValues.macd); + const minValue = Math.min(...macdValues.macd); + + const kValues: number[] = macdValues.macd.map(value => (value - minValue) / (maxValue - minValue) * 100); + const dValues: number[] = sma(kValues, period); + + return dValues; +} + +/** + * Hilbert Transform - Instantaneous Trendline + */ +export function hilbertTransformInstantaneousTrendline( + prices: number[] +): number[] { + // This is a placeholder. A full Hilbert Transform implementation is complex. + // Requires significantly more code and signal processing knowledge. + // Returning a simple moving average as a substitute. + return sma(prices, 20); +} + +/** + * Relative Volatility Index (RVI) + */ +export function relativeVolatilityIndex( + ohlcv: OHLCVData[], + period: number = 14 +): number[] { + const rviValues: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + let highCloseSum = 0; + let lowCloseSum = 0; + + for (let j = i; j > 0 && j >= i - period; j--) { + highCloseSum += Math.pow(ohlcv[j].high - ohlcv[j].close, 2); + lowCloseSum += Math.pow(ohlcv[j].low - ohlcv[j].close, 2); + } + + const highCloseStdDev = Math.sqrt(highCloseSum / period); + const lowCloseStdDev = Math.sqrt(lowCloseSum / period); + + const rviValue = 100 * highCloseStdDev / (highCloseStdDev + lowCloseStdDev); + rviValues.push(rviValue); + } + + return rviValues; +} + +/** + * Chande Momentum Oscillator (CMO) + */ +export function chandeMomentumOscillator(prices: number[], period: number = 14): number[] { + const cmoValues: number[] = []; + + for (let i = period; i < prices.length; i++) { + let sumOfGains = 0; + let 
sumOfLosses = 0; + + for (let j = i; j > 0 && j >= i - period; j--) { + const change = prices[j] - prices[j - 1]; + if (change > 0) { + sumOfGains += change; + } else { + sumOfLosses += Math.abs(change); + } + } + + const cmoValue = 100 * (sumOfGains - sumOfLosses) / (sumOfGains + sumOfLosses); + cmoValues.push(cmoValue); + } + + return cmoValues; +} + +/** + * Detrended Price Oscillator (DPO) + */ +export function detrendedPriceOscillator(prices: number[], period: number = 20): number[] { + const dpoValues: number[] = []; + const smaValues = sma(prices, period); + + for (let i = period; i < prices.length; i++) { + const dpoValue = prices[i - Math.floor(period / 2) - 1] - smaValues[i - period]; + dpoValues.push(dpoValue); + } + + return dpoValues; +} + +/** + * Fractal Chaos Bands + */ +export function fractalChaosBands(ohlcv: OHLCVData[], period: number = 20): { upper: number[], lower: number[] } { + const upper: number[] = []; + const lower: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + const slice = ohlcv.slice(i - period + 1, i + 1); + const highestHigh = Math.max(...slice.map(item => item.high)); + const lowestLow = Math.min(...slice.map(item => item.low)); + + upper.push(highestHigh); + lower.push(lowestLow); + } + + return { + upper, + lower + }; +} + +/** + * Know Sure Thing (KST) Oscillator + */ +export function knowSureThing( + prices: number[], + rocPeriod1: number = 10, + rocPeriod2: number = 15, + rocPeriod3: number = 20, + rocPeriod4: number = 30, + smaPeriod1: number = 10, + smaPeriod2: number = 10, + smaPeriod3: number = 10, + smaPeriod4: number = 15 +): number[] { + const roc1 = roc(prices, rocPeriod1); + const roc2 = roc(prices, rocPeriod2); + const roc3 = roc(prices, rocPeriod3); + const roc4 = roc(prices, rocPeriod4); + + const sma1 = sma(roc1, smaPeriod1); + const sma2 = sma(roc2, smaPeriod2); + const sma3 = sma(roc3, smaPeriod3); + const sma4 = sma(roc4, smaPeriod4); + + const kstValues: number[] = []; + + for (let i = 0; 
i < sma1.length; i++) { + const kstValue = sma1[i] + sma2[i] + sma3[i] + sma4[i]; + kstValues.push(kstValue); + } + + return kstValues; +} + +/** + * Percentage Price Oscillator (PPO) + */ +export function percentagePriceOscillator( + prices: number[], + fastPeriod: number = 12, + slowPeriod: number = 26 +): number[] { + const fastEMA = ema(prices, fastPeriod); + const slowEMA = ema(prices, slowPeriod); + + const ppoValues: number[] = []; + + for (let i = 0; i < fastEMA.length; i++) { + const ppoValue = ((fastEMA[i] - slowEMA[i]) / slowEMA[i]) * 100; + ppoValues.push(ppoValue); + } + + return ppoValues; +} + +/** + * Price Volume Trend (PVT) + */ +export function priceVolumeTrend(ohlcv: OHLCVData[]): number[] { + const pvtValues: number[] = [0]; // Initialize with 0 + + for (let i = 1; i < ohlcv.length; i++) { + const change = (ohlcv[i].close - ohlcv[i - 1].close) / ohlcv[i - 1].close; + const pvtValue = pvtValues[i - 1] + (change * ohlcv[i].volume); + pvtValues.push(pvtValue); + } + + return pvtValues; +} + +/** + * Q Stick + */ +export function qStick(ohlcv: OHLCVData[], period: number = 10): number[] { + const qStickValues: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + let sum = 0; + for (let j = i; j > 0 && j >= i - period; j--) { + sum += ohlcv[j].close - ohlcv[j].open; + } + qStickValues.push(sum / period); + } + + return qStickValues; +} + +/** + * TRIX (Triple Exponentially Smoothed Average) + */ +export function trix(prices: number[], period: number = 18): number[] { + const ema1 = ema(prices, period); + const ema2 = ema(ema1, period); + const ema3 = ema(ema2, period); + + const trixValues: number[] = []; + + for (let i = 1; i < ema3.length; i++) { + const trixValue = ((ema3[i] - ema3[i - 1]) / ema3[i - 1]) * 100; + trixValues.push(trixValue); + } + + return trixValues; +} + +/** + * Vertical Horizontal Filter (VHF) + */ +export function verticalHorizontalFilter(ohlcv: OHLCVData[], period: number = 28): number[] { + const vhfValues: 
number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + const slice = ohlcv.slice(i - period + 1, i + 1); + const highestHigh = Math.max(...slice.map(item => item.high)); + const lowestLow = Math.min(...slice.map(item => item.low)); + const closeChanges: number[] = []; + + for (let j = 1; j < slice.length; j++) { + closeChanges.push(Math.abs(slice[j].close - slice[j - 1].close)); + } + + const sumOfCloseChanges = closeChanges.reduce((a, b) => a + b, 0); + const vhfValue = (highestHigh - lowestLow) / sumOfCloseChanges; + vhfValues.push(vhfValue); + } + + return vhfValues; +} + +/** + * Volume Rate of Change (VROC) + */ +export function volumeRateOfChange(ohlcv: OHLCVData[], period: number = 10): number[] { + const vrocValues: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + if (ohlcv[i - period].volume === 0) { + vrocValues.push(0); // Avoid division by zero + } else { + const vrocValue = ((ohlcv[i].volume - ohlcv[i - period].volume) / ohlcv[i - period].volume) * 100; + vrocValues.push(vrocValue); + } + } + + return vrocValues; +} + +/** + * Average True Range Trailing Stops + * Calculates trailing stop levels based on ATR + */ +export function atrTrailingStops( + ohlcv: OHLCVData[], + period: number = 14, + multiplier: number = 3 +): { + longStop: number[]; + shortStop: number[]; +} { + const atrValues = atr(ohlcv, period); + const longStop: number[] = []; + const shortStop: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + longStop.push(ohlcv[i].low - multiplier * atrValues[i - period]); + shortStop.push(ohlcv[i].high + multiplier * atrValues[i - period]); + } + + return { + longStop, + shortStop + }; +} + +/** + * Elder's Force Index + * Measures the strength of a trend by combining price and volume + */ +export function eldersForceIndex( + ohlcv: OHLCVData[], + period: number = 13 +): number[] { + const forceIndexValues: number[] = []; + + for (let i = 1; i < ohlcv.length; i++) { + const change = ohlcv[i].close - 
ohlcv[i - 1].close; + const volume = ohlcv[i].volume; + forceIndexValues.push(change * volume); + } + + return ema(forceIndexValues, period); +} + +/** + * Ultimate Oscillator + */ +export function trueStrengthIndex( + prices: number[], + longPeriod: number = 25, + shortPeriod: number = 13, + signalPeriod: number = 9 +): number[] { + const priceChanges: number[] = []; + for (let i = 1; i < prices.length; i++) { + priceChanges.push(prices[i] - prices[i - 1]); + } + + const smoothedMomentum = ema(priceChanges, shortPeriod); + const doubleSmoothedMomentum = ema(smoothedMomentum, longPeriod); + + const absoluteMomentum = priceChanges.map(Math.abs); + const smoothedAbsoluteMomentum = ema(absoluteMomentum, shortPeriod); + const doubleSmoothedAbsoluteMomentum = ema(smoothedAbsoluteMomentum, longPeriod); + + const tsiValues: number[] = []; + for (let i = longPeriod; i < prices.length - 1; i++) { + tsiValues.push( + (doubleSmoothedMomentum[i - longPeriod] / doubleSmoothedAbsoluteMomentum[i - longPeriod]) * 100 + ); + } + + return tsiValues; +} + +/** + * Money Flow Multiplier + * Calculates the Money Flow Multiplier + */ +export function moneyFlowMultiplier(ohlcv: OHLCVData[]): number[] { + return ohlcv.map(candle => ((candle.close - candle.low) - (candle.high - candle.close)) / (candle.high - candle.low)); +} + +/** + * Positive Volume Index (PVI) + */ +export function positiveVolumeIndex(ohlcv: OHLCVData[], initialValue: number = 1000): number[] { + const pviValues: number[] = [initialValue]; + + for (let i = 1; i < ohlcv.length; i++) { + if (ohlcv[i].volume > ohlcv[i - 1].volume) { + const change = (ohlcv[i].close - ohlcv[i - 1].close) / ohlcv[i - 1].close; + pviValues.push(pviValues[i - 1] + (pviValues[i - 1] * change)); + } else { + pviValues.push(pviValues[i - 1]); + } + } + + return pviValues; +} + +/** + * Negative Volume Index (NVI) + */ +export function negativeVolumeIndex(ohlcv: OHLCVData[], initialValue: number = 1000): number[] { + const nviValues: number[] = 
[initialValue]; + + for (let i = 1; i < ohlcv.length; i++) { + if (ohlcv[i].volume < ohlcv[i - 1].volume) { + const change = (ohlcv[i].close - ohlcv[i - 1].close) / ohlcv[i - 1].close; + nviValues.push(nviValues[i - 1] + (nviValues[i - 1] * change)); + } else { + nviValues.push(nviValues[i - 1]); + } + } + + return nviValues; +} + +/** + * Typical Price + * Calculates the typical price for each period + */ +export function typicalPrice(ohlcv: OHLCVData[]): number[] { + return ohlcv.map(candle => (candle.high + candle.low + candle.close) / 3); +} + +/** + * Median Price + * Calculates the median price for each period + */ +export function medianPrice(ohlcv: OHLCVData[]): number[] { + return ohlcv.map(candle => (candle.high + candle.low) / 2); +} + +/** + * On Balance Volume Mean (OBV Mean) + * Calculates the mean of the On Balance Volume (OBV) values. + */ +export function onBalanceVolumeMean(ohlcv: OHLCVData[], period: number = 14): number[] { + const obvValues = obv(ohlcv); + return sma(obvValues, period); +} + +/** + * Kaufman's Adaptive Moving Average (KAMA) + */ +export function kama(prices: number[], period: number = 10, fastPeriod: number = 2, slowPeriod: number = 30): number[] { + const kamaValues: number[] = []; + + if (prices.length <= period) { + return kamaValues; + } + + // Calculate the initial KAMA using SMA + const firstSMA = prices.slice(0, period).reduce((sum, price) => sum + price, 0) / period; + let kama = firstSMA; + kamaValues.push(kama); + + // Constants for the calculation + const fastConst = 2 / (fastPeriod + 1); + const slowConst = 2 / (slowPeriod + 1); + + for (let i = period; i < prices.length; i++) { + // Calculate direction - the numerator of the efficiency ratio + const direction = Math.abs(prices[i] - prices[i - period]); + + // Calculate volatility - the denominator of the efficiency ratio + let volatility = 0; + for (let j = i - period + 1; j <= i; j++) { + volatility += Math.abs(prices[j] - prices[j - 1]); + } + + // Calculate 
efficiency ratio (ER) + // Handle the case where volatility is zero to avoid division by zero + const er = volatility === 0 ? 1 : Math.min(direction / volatility, 1); + + // Calculate smoothing constant (SC) + const sc = Math.pow(er * (fastConst - slowConst) + slowConst, 2); + + // Calculate KAMA + kama = kama + sc * (prices[i] - kama); + kamaValues.push(kama); + } + + return kamaValues; +} + +/** + * DeMarker + */ +export function deMarker(ohlcv: OHLCVData[], period: number = 14): number[] { + const deMax: number[] = []; + const deMin: number[] = []; + + for (let i = 1; i < ohlcv.length; i++) { + deMax.push(ohlcv[i].high > ohlcv[i - 1].high ? ohlcv[i].high - ohlcv[i - 1].high : 0); + deMin.push(ohlcv[i].low < ohlcv[i - 1].low ? ohlcv[i - 1].low - ohlcv[i].low : 0); + } + + const sumDeMax = sma(deMax, period); + const sumDeMin = sma(deMin, period); + + const deMarkerValues: number[] = []; + for (let i = period; i < ohlcv.length; i++) { + deMarkerValues.push(sumDeMax[i - period] / (sumDeMax[i - period] + sumDeMin[i - period])); + } + + return deMarkerValues; +} + +/** + * Elder's SafeZone Stops + */ +export function eldersSafeZoneStops(ohlcv: OHLCVData[], atrPeriod: number = 20, percentageRisk: number = 2): { longStop: number[], shortStop: number[] } { + const atrValues = atr(ohlcv, atrPeriod); + const longStop: number[] = []; + const shortStop: number[] = []; + + for (let i = atrPeriod; i < ohlcv.length; i++) { + longStop.push(ohlcv[i].low - (atrValues[i - atrPeriod] * (percentageRisk / 100))); + shortStop.push(ohlcv[i].high + (atrValues[i - atrPeriod] * (percentageRisk / 100))); + } + + return { + longStop, + shortStop + }; +} + +/** + * Projection Oscillator + */ +export function projectionOscillator(ohlcv: OHLCVData[], period: number = 14): number[] { + const projectionOscillatorValues: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + let highestHigh = ohlcv[i - period].high; + let lowestLow = ohlcv[i - period].low; + + for (let j = i - period; 
j < i; j++) { + if (ohlcv[j].high > highestHigh) { + highestHigh = ohlcv[j].high; + } + if (ohlcv[j].low < lowestLow) { + lowestLow = ohlcv[j].low; + } + } + + const projectionOscillatorValue = ((ohlcv[i].close - lowestLow) / (highestHigh - lowestLow)) * 100; + projectionOscillatorValues.push(projectionOscillatorValue); + } + + return projectionOscillatorValues; +} + +/** + * Twiggs Money Flow + */ +export function twiggsMoneyFlow(ohlcv: OHLCVData[]): number[] { + const twiggsMoneyFlowValues: number[] = []; + + for (let i = 0; i < ohlcv.length; i++) { + const moneyFlowVolume = ohlcv[i].volume * (((ohlcv[i].close - ohlcv[i].low) - (ohlcv[i].high - ohlcv[i].close)) / (ohlcv[i].high - ohlcv[i].low)); + twiggsMoneyFlowValues.push(moneyFlowVolume); + } + + return twiggsMoneyFlowValues; +} + + +/** + * Relative Strength + * Compares the performance of one asset to another + */ +export function relativeStrength(prices1: number[], prices2: number[], period: number = 14): number[] { + const rsValues: number[] = []; + const sma1 = sma(prices1, period); + const sma2 = sma(prices2, period); + + for (let i = 0; i < sma1.length; i++) { + rsValues.push(sma1[i] / sma2[i]); + } + + return rsValues; +} + +/** + * Correlation Coefficient + * Measures the statistical relationship between two assets + */ +export function correlationCoefficient(prices1: number[], prices2: number[], period: number = 14): number[] { + const correlationValues: number[] = []; + + for (let i = period; i < prices1.length; i++) { + const slice1 = prices1.slice(i - period, i); + const slice2 = prices2.slice(i - period, i); + + const mean1 = slice1.reduce((a, b) => a + b, 0) / period; + const mean2 = slice2.reduce((a, b) => a + b, 0) / period; + + let sumXY = 0; + let sumX2 = 0; + let sumY2 = 0; + + for (let j = 0; j < period; j++) { + sumXY += (slice1[j] - mean1) * (slice2[j] - mean2); + sumX2 += Math.pow(slice1[j] - mean1, 2); + sumY2 += Math.pow(slice2[j] - mean2, 2); + } + + const correlation = sumXY / 
(Math.sqrt(sumX2) * Math.sqrt(sumY2)); + correlationValues.push(correlation); + } + + return correlationValues; +} + +/** + * Coppock Range + * Calculates the range between high and low Coppock values + */ +export function coppockRange(prices: number[], longPeriod: number = 14, shortPeriod: number = 11, wmaPeriod: number = 10): { high: number[], low: number[] } { + const coppockValues = coppockCurve(prices, longPeriod, shortPeriod, wmaPeriod); + const highValues: number[] = []; + const lowValues: number[] = []; + + for (let i = 1; i < coppockValues.length; i++) { + highValues.push(Math.max(coppockValues[i], coppockValues[i - 1])); + lowValues.push(Math.min(coppockValues[i], coppockValues[i - 1])); + } + + return { + high: highValues, + low: lowValues + }; +} + +/** + * Chaikin Oscillator + * Calculates the difference between two moving averages of the Accumulation/Distribution Line + */ +export function chaikinOscillator(ohlcv: OHLCVData[], fastPeriod: number = 3, slowPeriod: number = 10): number[] { + const adlValues = accumulationDistribution(ohlcv); + const fastMA = ema(adlValues, fastPeriod); + const slowMA = ema(adlValues, slowPeriod); + + const chaikinOscillatorValues: number[] = []; + for (let i = 0; i < fastMA.length; i++) { + chaikinOscillatorValues.push(fastMA[i] - slowMA[i]); + } + + return chaikinOscillatorValues; +} + +/** + * Prime Number Oscillator + * Uses prime numbers to create an oscillator + */ +export function primeNumberOscillator(prices: number[], period: number = 14): number[] { + const primeNumbers = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43]; // First 14 prime numbers + const pnoValues: number[] = []; + + for (let i = period; i < prices.length; i++) { + let sum = 0; + for (let j = 0; j < period; j++) { + sum += prices[i - j] * primeNumbers[j]; + } + pnoValues.push(sum); + } + + return pnoValues; +} + +/** + * Fractal Efficiency + * Measures the efficiency of price movement based on fractal dimension + */ +export function 
fractalEfficiency(ohlcv: OHLCVData[], period: number = 20): number[] { + const fractalEfficiencyValues: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + let netDistance = 0; + for (let j = i; j > i - period; j--) { + netDistance += Math.sqrt(Math.pow(ohlcv[j].close - ohlcv[j - 1].close, 2)); + } + + const straightLineDistance = Math.sqrt(Math.pow(ohlcv[i].close - ohlcv[i - period].close, 2)); + const fractalEfficiencyValue = straightLineDistance / netDistance; + fractalEfficiencyValues.push(fractalEfficiencyValue); + } + + return fractalEfficiencyValues; +} + +/** + * Market Facilitation Index (MFI) + */ +export function marketFacilitationIndex(ohlcv: OHLCVData[]): number[] { + const mfiValues: number[] = []; + + for (let i = 0; i < ohlcv.length; i++) { + const range = ohlcv[i].high - ohlcv[i].low; + const mfiValue = range / ohlcv[i].volume; + mfiValues.push(mfiValue); + } + + return mfiValues; +} + +/** + * Elder-Disk + * Combination of Elder-Ray and Force Index + */ +export function elderDisk(ohlcv: OHLCVData[], period: number = 13): number[] { + const { bullPower, bearPower } = elderRay(ohlcv, period); + const forceIndexValues = forceIndex(ohlcv, period); + + const elderDiskValues: number[] = []; + for (let i = 0; i < bullPower.length; i++) { + elderDiskValues.push(bullPower[i] + bearPower[i] + forceIndexValues[i]); + } + + return elderDiskValues; +} + +/** + * Relative Vigor Index (RVI) + */ +export function relativeVigorIndex(ohlcv: OHLCVData[], period: number = 10): number[] { + const rviValues: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + let sumNumerator = 0; + let sumDenominator = 0; + + for (let j = i; j > i - period; j--) { + sumNumerator += (ohlcv[j].close - ohlcv[j].open) * (ohlcv[j].high - ohlcv[j].low); + sumDenominator += (ohlcv[j].high - ohlcv[j].low) * (ohlcv[j].high - ohlcv[j].low); + } + + const rviValue = sumDenominator !== 0 ? 
sumNumerator / sumDenominator : 0; + rviValues.push(rviValue); + } + + return rviValues; +} + +/** + * Balance of Power (BOP) + */ +export function balanceOfPower(ohlcv: OHLCVData[]): number[] { + const bopValues: number[] = []; + + for (let i = 0; i < ohlcv.length; i++) { + const range = ohlcv[i].high - ohlcv[i].low; + const bopValue = range !== 0 ? (ohlcv[i].close - ohlcv[i].open) / range : 0; + bopValues.push(bopValue); + } + + return bopValues; +} + +/** + * Stochastic RSI + * Combines Stochastic Oscillator and RSI to provide overbought/oversold signals + */ +export function stochasticRSI( + prices: number[], + rsiPeriod: number = 14, + stochasticPeriod: number = 14, + smoothPeriod: number = 3 +): { k: number[]; d: number[] } { + const rsiValues = rsi(prices, rsiPeriod); + return stochastic( + rsiValues.map(rsi => ({ high: rsi, low: rsi, close: rsi, open: rsi, volume: 0 } as OHLCVData)), + stochasticPeriod, + smoothPeriod + ); +} + +/** + * StochRSI Fast + */ +export function stochRSIFast( + prices: number[], + rsiPeriod: number = 14, + stochasticPeriod: number = 14 +): { k: number[]; d: number[] } { + const rsiValues = rsi(prices, rsiPeriod); + return stochastic( + rsiValues.map(rsi => ({ high: rsi, low: rsi, close: rsi, open: rsi, volume: 0 } as OHLCVData)), + stochasticPeriod, + 1 + ); +} + +/** + * StochRSI Full + */ +export function stochRSIFull( + prices: number[], + rsiPeriod: number = 14, + stochasticPeriod: number = 14, + kSmoothPeriod: number = 3, + dSmoothPeriod: number = 3 +): { k: number[]; d: number[] } { + const rsiValues = rsi(prices, rsiPeriod); + const { k } = stochastic( + rsiValues.map(rsi => ({ high: rsi, low: rsi, close: rsi, open: rsi, volume: 0 } as OHLCVData)), + stochasticPeriod, + kSmoothPeriod + ); + const d = sma(k, dSmoothPeriod); + return { k, d }; +} + +/** + * Normalized Average True Range (NATR) + */ +export function normalizedAverageTrueRange(ohlcv: OHLCVData[], period: number = 14): number[] { + const atrValues = atr(ohlcv, 
period); + const natrValues: number[] = []; + + for (let i = 0; i < atrValues.length; i++) { + natrValues.push((atrValues[i] / ohlcv[i].close) * 100); + } + + return natrValues; +} + +/** + * Pretty Good Oscillator (PGO) + */ +export function prettyGoodOscillator(ohlcv: OHLCVData[], period: number = 14): number[] { + const pgoValues: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + let sumHighLow = 0; + let sumCloseOpen = 0; + + for (let j = i; j > i - period; j--) { + sumHighLow += ohlcv[j].high - ohlcv[j].low; + sumCloseOpen += ohlcv[j].close - ohlcv[j].open; + } + + const pgoValue = sumHighLow !== 0 ? sumCloseOpen / sumHighLow : 0; + pgoValues.push(pgoValue); + } + + return pgoValues; +} + +/** + * Intraday Intensity Index (III) + */ +export function intradayIntensityIndex(ohlcv: OHLCVData[]): number[] { + const iiiValues: number[] = []; + + for (let i = 0; i < ohlcv.length; i++) { + const volume = ohlcv[i].volume; + const range = ohlcv[i].high - ohlcv[i].low; + const iiiValue = range !== 0 ? 
((2 * ohlcv[i].close - ohlcv[i].high - ohlcv[i].low) / range) * volume : 0; + iiiValues.push(iiiValue); + } + + return iiiValues; +} + +/** + * Money Flow Chaikin A/D Oscillator + * Uses the Chaikin A/D line to create an oscillator + */ +export function moneyFlowChaikinOscillator(ohlcv: OHLCVData[], fastPeriod: number = 3, slowPeriod: number = 10): number[] { + const adlValues = accumulationDistribution(ohlcv); + const fastMA = ema(adlValues, fastPeriod); + const slowMA = ema(adlValues, slowPeriod); + + const moneyFlowChaikinOscillatorValues: number[] = []; + for (let i = 0; i < fastMA.length; i++) { + moneyFlowChaikinOscillatorValues.push(fastMA[i] - slowMA[i]); + } + + return moneyFlowChaikinOscillatorValues; +} + +/** + * Elder's Thermometer + * Uses high and low prices to gauge market temperature + */ +export function eldersThermometer(ohlcv: OHLCVData[], period: number = 20): number[] { + const eldersThermometerValues: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + let sumOfHighs = 0; + let sumOfLows = 0; + + for (let j = i; j > i - period; j--) { + sumOfHighs += ohlcv[j].high; + sumOfLows += ohlcv[j].low; + } + + const averageHigh = sumOfHighs / period; + const averageLow = sumOfLows / period; + const thermometerValue = averageHigh - averageLow; + eldersThermometerValues.push(thermometerValue); + } + + return eldersThermometerValues; +} + +/** + * High-Low Range + * Calculates the range between high and low prices + */ +export function highLowRange(ohlcv: OHLCVData[]): number[] { + return ohlcv.map(candle => candle.high - candle.low); +} + +/** + * Typical Price Range + * Calculates the range of typical prices + */ +export function typicalPriceRange(ohlcv: OHLCVData[]): number[] { + const typicalPrices = typicalPrice(ohlcv); + const typicalPriceRangeValues: number[] = []; + + for (let i = 1; i < typicalPrices.length; i++) { + typicalPriceRangeValues.push(typicalPrices[i] - typicalPrices[i - 1]); + } + + return typicalPriceRangeValues; +} + 
+/** + * Median Price Range + * Calculates the range of median prices + */ +export function medianPriceRange(ohlcv: OHLCVData[]): number[] { + const medianPrices = medianPrice(ohlcv); + const medianPriceRangeValues: number[] = []; + + for (let i = 1; i < medianPrices.length; i++) { + medianPriceRangeValues.push(medianPrices[i] - medianPrices[i - 1]); + } + + return medianPriceRangeValues; +} + +/** + * Center of Gravity + */ +export function centerOfGravity(prices: number[], period: number = 10): number[] { + const cogValues: number[] = []; + + for (let i = period; i < prices.length; i++) { + let weightedSum = 0; + let sumOfWeights = 0; + + for (let j = 1; j <= period; j++) { + weightedSum += j * prices[i - period + j]; + sumOfWeights += j; + } + + const cogValue = weightedSum / sumOfWeights; + cogValues.push(cogValue); + } + + return cogValues; +} + +/** + * Linear Regression Indicator + */ +export function linearRegressionIndicator(prices: number[], period: number = 14): number[] { + const lriValues: number[] = []; + + if (prices.length < period) { + return lriValues; + } + + for (let i = period; i < prices.length; i++) { + const slice = prices.slice(i - period, i); + + // Calculate means for normalization (increases numerical stability) + const meanX = (period + 1) / 2; // Mean of 1,2,3,...,period + let meanY = 0; + for (let j = 0; j < period; j++) { + meanY += slice[j]; + } + meanY /= period; + + // Calculate covariance and variance with normalized data + let covariance = 0; + let variance = 0; + + for (let j = 0; j < period; j++) { + const xDiff = (j + 1) - meanX; + const yDiff = slice[j] - meanY; + + covariance += xDiff * yDiff; + variance += xDiff * xDiff; + } + + // Avoid division by zero + const slope = variance !== 0 ? 
covariance / variance : 0; + const intercept = meanY - slope * meanX; + + // Calculate the predicted value at the end of the period + const lriValue = slope * period + intercept; + lriValues.push(lriValue); + } + + return lriValues; +} + +/** + * Standard Deviation + * Calculates the standard deviation of a set of values + */ +export function standardDeviation(prices: number[], period: number = 20): number[] { + const stdDevValues: number[] = []; + const smaValues = sma(prices, period); + + for (let i = period - 1; i < prices.length; i++) { + const slice = prices.slice(i - period + 1, i + 1); + const mean = smaValues[i - period + 1]; + let sumOfSquaredDifferences = 0; + + for (const price of slice) { + sumOfSquaredDifferences += Math.pow(price - mean, 2); + } + + const variance = sumOfSquaredDifferences / period; + const stdDevValue = Math.sqrt(variance); + stdDevValues.push(stdDevValue); + } + + return stdDevValues; +} + +/** + * Chaikin A/D Range + * Calculates the range of the Chaikin A/D line + */ +export function chaikinADRange(ohlcv: OHLCVData[]): number[] { + const adValues = accumulationDistribution(ohlcv); + const adRangeValues: number[] = []; + + for (let i = 1; i < adValues.length; i++) { + adRangeValues.push(adValues[i] - adValues[i - 1]); + } + + return adRangeValues; +} + +/** + * Volume Oscillator + * Compares two moving averages of volume + */ +export function volumeOscillator(ohlcv: OHLCVData[], fastPeriod: number = 5, slowPeriod: number = 10): number[] { + const volumes = ohlcv.map(candle => candle.volume); + const fastMA = sma(volumes, fastPeriod); + const slowMA = sma(volumes, slowPeriod); + + const volumeOscillatorValues: number[] = []; + for (let i = 0; i < fastMA.length; i++) { + volumeOscillatorValues.push((fastMA[i] - slowMA[i]) / slowMA[i] * 100); + } + + return volumeOscillatorValues; +} + +/** + * Money Flow Index Range + * Calculates the range of the Money Flow Index + */ +export function moneyFlowIndexRange(ohlcv: OHLCVData[], period: 
number = 14): number[] { + const mfiValues = mfi(ohlcv, period); + const mfiRangeValues: number[] = []; + + for (let i = 1; i < mfiValues.length; i++) { + mfiRangeValues.push(mfiValues[i] - mfiValues[i - 1]); + } + + return mfiRangeValues; +} + +/** + * On Balance Volume Oscillator + * Calculates the oscillator of the On Balance Volume + */ +export function onBalanceVolumeOscillator(ohlcv: OHLCVData[], fastPeriod: number = 5, slowPeriod: number = 10): number[] { + const obvValues = obv(ohlcv); + const fastMA = sma(obvValues, fastPeriod); + const slowMA = sma(obvValues, slowPeriod); + + const obvOscillatorValues: number[] = []; + for (let i = 0; i < fastMA.length; i++) { + obvOscillatorValues.push((fastMA[i] - slowMA[i]) / slowMA[i] * 100); + } + + return obvOscillatorValues; +} + +/** + * Klinger Oscillator + */ +export function klingerOscillator(ohlcv: OHLCVData[], fastPeriod: number = 34, slowPeriod: number = 55): number[] { + if (ohlcv.length < 2) { + return []; + } + + // Calculate volume force + const volumeForce: number[] = []; + + for (let i = 1; i < ohlcv.length; i++) { + const current = ohlcv[i]; + const previous = ohlcv[i - 1]; + + // Calculate typical prices + const typicalPriceCurrent = (current.high + current.low + current.close) / 3; + const typicalPricePrevious = (previous.high + previous.low + previous.close) / 3; + + // Determine trend + const trend = typicalPriceCurrent > typicalPricePrevious ? 
1 : -1; + + // Calculate volume force + const force = trend * ohlcv[i].volume * Math.abs(typicalPriceCurrent - typicalPricePrevious); + volumeForce.push(force); + } + + // Calculate fast and slow EMAs of the volume force + const fastEMA = ema(volumeForce, fastPeriod); + const slowEMA = ema(volumeForce, slowPeriod); + + // Calculate Klinger Oscillator + const klingerOscillatorValues: number[] = []; + + // Both EMAs should have the same starting point + const startIndex = Math.abs(fastEMA.length - slowEMA.length); + const shorterEMA = fastEMA.length < slowEMA.length ? fastEMA : slowEMA; + const longerEMA = fastEMA.length < slowEMA.length ? slowEMA : fastEMA; + + for (let i = 0; i < shorterEMA.length; i++) { + if (fastEMA.length < slowEMA.length) { + klingerOscillatorValues.push(shorterEMA[i] - longerEMA[i + startIndex]); + } else { + klingerOscillatorValues.push(longerEMA[i + startIndex] - shorterEMA[i]); + } + } + + return klingerOscillatorValues; +} + +/** + * Directional Movement Index (DMI) + */ +export function directionalMovementIndex(ohlcv: OHLCVData[], period: number = 14): { plusDI: number[], minusDI: number[] } { + const { plusDI, minusDI } = adx(ohlcv, period); + return { plusDI, minusDI }; +} + +/** + * Elder's Cloud + */ +export function eldersCloud(ohlcv: OHLCVData[], period: number = 20): { upper: number[], lower: number[] } { + const emaValues = ema(ohlcv.map(item => item.close), period); + const atrValues = atr(ohlcv, period); + const upper: number[] = []; + const lower: number[] = []; + + for (let i = 0; i < emaValues.length; i++) { + upper.push(emaValues[i] + atrValues[i]); + lower.push(emaValues[i] - atrValues[i]); + } + + return { + upper, + lower + }; +} + +/** + * Ultimate Moving Average (UMA) + */ +export function ultimateMovingAverage(prices: number[], fastPeriod: number = 7, mediumPeriod: number = 14, slowPeriod: number = 28): number[] { + const fastMA = sma(prices, fastPeriod); + const mediumMA = sma(prices, mediumPeriod); + const slowMA = 
sma(prices, slowPeriod); + + const umaValues: number[] = []; + for (let i = 0; i < fastMA.length; i++) { + umaValues.push((fastMA[i] + mediumMA[i] + slowMA[i]) / 3); + } + + return umaValues; +} + +/** + * Rainbow Oscillator + */ +export function rainbowOscillator(prices: number[], numberOfMAs: number = 7, periodIncrement: number = 5): number[] { + const maValues: number[][] = []; + for (let i = 1; i <= numberOfMAs; i++) { + maValues.push(sma(prices, i * periodIncrement)); + } + + const rainbowOscillatorValues: number[] = []; + for (let i = 0; i < maValues[0].length; i++) { + let sum = 0; + for (let j = 0; j < numberOfMAs; j++) { + sum += maValues[j][i]; + } + rainbowOscillatorValues.push(sum / numberOfMAs); + } + + return rainbowOscillatorValues; +} + +/** + * Guppy Multiple Moving Average (GMMA) + */ +export function guppyMultipleMovingAverage(prices: number[], shortTermPeriods: number[] = [3, 5, 8, 10, 12, 15], longTermPeriods: number[] = [30, 35, 40, 45, 50, 60]): { shortTermMAs: number[][], longTermMAs: number[][] } { + const shortTermMAs: number[][] = []; + const longTermMAs: number[][] = []; + + for (const period of shortTermPeriods) { + shortTermMAs.push(sma(prices, period)); + } + + for (const period of longTermPeriods) { + longTermMAs.push(sma(prices, period)); + } + + return { shortTermMAs, longTermMAs }; +} + +/** + * Historical Volatility + */ +export function historicalVolatility(prices: number[], period: number = 20): number[] { + const logReturns: number[] = []; + for (let i = 1; i < prices.length; i++) { + logReturns.push(Math.log(prices[i] / prices[i - 1])); + } + + const stdDevs = standardDeviation(logReturns, period); + const historicalVolatilityValues: number[] = []; + + for (const stdDev of stdDevs) { + historicalVolatilityValues.push(stdDev * Math.sqrt(252)); // Annualize + } + + return historicalVolatilityValues; +} + +/** + * Donchian Width + */ +export function donchianWidth(ohlcv: OHLCVData[], period: number = 20): number[] { + const { 
upper, lower } = donchianChannels(ohlcv, period); + const donchianWidthValues: number[] = []; + + for (let i = 0; i < upper.length; i++) { + donchianWidthValues.push(upper[i] - lower[i]); + } + + return donchianWidthValues; +} + +/** + * Chandelier Exit + */ +export function chandelierExit(ohlcv: OHLCVData[], period: number = 22, multiplier: number = 3): { long: number[], short: number[] } { + const atrValues = atr(ohlcv, period); + const long: number[] = []; + const short: number[] = []; + + for (let i = period; i < ohlcv.length; i++) { + const slice = ohlcv.slice(i - period, i); + const highestHigh = Math.max(...slice.map(item => item.high)); + const lowestLow = Math.min(...slice.map(item => item.low)); + + long.push(highestHigh - multiplier * atrValues[i - period]); + short.push(lowestLow + multiplier * atrValues[i - period]); + } + + return { long, short }; +} + +/** + * Projection Bands + */ +export function projectionBands(ohlcv: OHLCVData[], period: number = 14, stdDevMultiplier: number = 2): { upper: number[], lower: number[] } { + const projectionOscillatorValues = projectionOscillator(ohlcv, period); + const stdDevValues = standardDeviation(projectionOscillatorValues, period); + const upper: number[] = []; + const lower: number[] = []; + + for (let i = 0; i < projectionOscillatorValues.length; i++) { + upper.push(projectionOscillatorValues[i] + stdDevMultiplier * stdDevValues[i]); + lower.push(projectionOscillatorValues[i] - stdDevMultiplier * stdDevValues[i]); + } + + return { upper, lower }; +} + +/** + * Range Action Verification Index (RAVI) + */ +export function rangeActionVerificationIndex(prices: number[], longPeriod: number = 65, shortPeriod: number = 10): number[] { + const longMA = sma(prices, longPeriod); + const shortMA = sma(prices, shortPeriod); + + const raviValues: number[] = []; + for (let i = 0; i < longMA.length; i++) { + raviValues.push((shortMA[i] - longMA[i]) / longMA[i] * 100); + } + + return raviValues; +} + +/** + * Momentum from 
Current Price + * Calculates momentum using the current price and a previous price. Reduces lag compared to using moving averages. + */ +export function momentumFromCurrentPrice(prices: number[], period: number = 10): number[] { + const result: number[] = []; + + for (let i = period; i < prices.length; i++) { + const momentum = prices[i] - prices[i - period]; + result.push(momentum); + } + + return result; +} + +/** + * Rate of Change from Current Price (ROC) + * Calculates ROC using the current price. + */ +export function rocFromCurrentPrice(prices: number[], period: number = 10): number[] { + const result: number[] = []; + + for (let i = period; i < prices.length; i++) { + if (prices[i - period] === 0) { + result.push(0); + } else { + const rocValue = ((prices[i] - prices[i - period]) / prices[i - period]) * 100; + result.push(rocValue); + } + } + + return result; } \ No newline at end of file diff --git a/libs/utils/src/calculations/volatility-models.ts b/libs/utils/src/calculations/volatility-models.ts index e63ee9b..2fdd3ca 100644 --- a/libs/utils/src/calculations/volatility-models.ts +++ b/libs/utils/src/calculations/volatility-models.ts @@ -1,595 +1,595 @@ -/** - * Volatility Models - * Advanced volatility modeling and forecasting tools - */ - -// Local interface definition to avoid circular dependency -interface OHLCVData { - open: number; - high: number; - low: number; - close: number; - volume: number; - timestamp: Date; -} - -export interface GARCHParameters { - omega: number; // Constant term - alpha: number; // ARCH parameter - beta: number; // GARCH parameter - logLikelihood: number; - aic: number; - bic: number; -} - -export interface VolatilityEstimates { - closeToClose: number; - parkinson: number; - garmanKlass: number; - rogersSatchell: number; - yangZhang: number; -} - -export interface VolatilityRegime { - regime: number; - startDate: Date; - endDate: Date; - averageVolatility: number; - observations: number; -} - -export interface 
VolatilityTerm { - maturity: number; // Days to maturity - impliedVolatility: number; - confidence: number; -} - -export interface HestonParameters { - kappa: number; // Mean reversion speed - theta: number; // Long-term variance - sigma: number; // Volatility of variance - rho: number; // Correlation - v0: number; // Initial variance - logLikelihood: number; -} - -/** - * Calculate realized volatility using different estimators - */ -export function calculateRealizedVolatility( - ohlcv: OHLCVData[], - annualizationFactor: number = 252 -): VolatilityEstimates { - if (ohlcv.length < 2) { - throw new Error('Need at least 2 observations for volatility calculation'); - } - - const n = ohlcv.length; - let closeToCloseSum = 0; - let parkinsonSum = 0; - let garmanKlassSum = 0; - let rogersSatchellSum = 0; - let yangZhangSum = 0; - - // Calculate log returns and volatility estimators - for (let i = 1; i < n; i++) { - const prev = ohlcv[i - 1]; - const curr = ohlcv[i]; - - // Close-to-close - const logReturn = Math.log(curr.close / prev.close); - closeToCloseSum += logReturn * logReturn; - - // Parkinson estimator - const logHighLow = Math.log(curr.high / curr.low); - parkinsonSum += logHighLow * logHighLow; - - // Garman-Klass estimator - const logOpenClose = Math.log(curr.close / curr.open); - garmanKlassSum += 0.5 * logHighLow * logHighLow - (2 * Math.log(2) - 1) * logOpenClose * logOpenClose; - - // Rogers-Satchell estimator - const logHighOpen = Math.log(curr.high / curr.open); - const logHighClose = Math.log(curr.high / curr.close); - const logLowOpen = Math.log(curr.low / curr.open); - const logLowClose = Math.log(curr.low / curr.close); - rogersSatchellSum += logHighOpen * logHighClose + logLowOpen * logLowClose; - - // Yang-Zhang estimator components - const overnight = Math.log(curr.open / prev.close); - yangZhangSum += overnight * overnight + rogersSatchellSum / i; // Simplified for brevity - } - - return { - closeToClose: Math.sqrt((closeToCloseSum / (n - 1)) * 
annualizationFactor), - parkinson: Math.sqrt((parkinsonSum / (n - 1) / (4 * Math.log(2))) * annualizationFactor), - garmanKlass: Math.sqrt((garmanKlassSum / (n - 1)) * annualizationFactor), - rogersSatchell: Math.sqrt((rogersSatchellSum / (n - 1)) * annualizationFactor), - yangZhang: Math.sqrt((yangZhangSum / (n - 1)) * annualizationFactor) - }; -} - -/** - * Estimate GARCH(1,1) model parameters - */ -export function estimateGARCH( - returns: number[], - maxIterations: number = 100, - tolerance: number = 1e-6 -): GARCHParameters { - const n = returns.length; - - // Initial parameter estimates - let omega = 0.01; - let alpha = 0.05; - let beta = 0.9; - - // Calculate unconditional variance - const meanReturn = returns.reduce((sum, r) => sum + r, 0) / n; - const unconditionalVar = returns.reduce((sum, r) => sum + Math.pow(r - meanReturn, 2), 0) / (n - 1); - - let logLikelihood = -Infinity; - - for (let iter = 0; iter < maxIterations; iter++) { - const variances: number[] = [unconditionalVar]; - let newLogLikelihood = 0; - - // Calculate conditional variances - for (let t = 1; t < n; t++) { - const prevVar = variances[t - 1]; - const prevReturn = returns[t - 1] - meanReturn; - const currentVar = omega + alpha * prevReturn * prevReturn + beta * prevVar; - variances.push(Math.max(currentVar, 1e-8)); // Ensure positive variance - - // Add to log-likelihood - const currentReturn = returns[t] - meanReturn; - newLogLikelihood -= 0.5 * (Math.log(2 * Math.PI) + Math.log(currentVar) + - (currentReturn * currentReturn) / currentVar); - } - - // Check for convergence - if (Math.abs(newLogLikelihood - logLikelihood) < tolerance) { - break; - } - - logLikelihood = newLogLikelihood; - - // Simple gradient update (in practice, use more sophisticated optimization) - const gradientStep = 0.001; - omega = Math.max(0.001, omega + gradientStep); - alpha = Math.max(0.001, Math.min(0.999, alpha + gradientStep)); - beta = Math.max(0.001, Math.min(0.999 - alpha, beta + gradientStep)); - } - 
- // Calculate information criteria - const k = 3; // Number of parameters - const aic = -2 * logLikelihood + 2 * k; - const bic = -2 * logLikelihood + k * Math.log(n); - - return { - omega, - alpha, - beta, - logLikelihood, - aic, - bic - }; -} - -/** - * Calculate EWMA volatility - */ -export function calculateEWMAVolatility( - returns: number[], - lambda: number = 0.94, - annualizationFactor: number = 252 -): number[] { - const n = returns.length; - const volatilities: number[] = []; - - // Initialize with sample variance - const meanReturn = returns.reduce((sum, r) => sum + r, 0) / n; - let variance = returns.reduce((sum, r) => sum + Math.pow(r - meanReturn, 2), 0) / (n - 1); - - for (let t = 0; t < n; t++) { - if (t > 0) { - const prevReturn = returns[t - 1] - meanReturn; - variance = lambda * variance + (1 - lambda) * prevReturn * prevReturn; - } - volatilities.push(Math.sqrt(variance * annualizationFactor)); - } - - return volatilities; -} - -/** - * Identify volatility regimes - */ -export function identifyVolatilityRegimes( - returns: number[], - numRegimes: number = 3, - windowSize: number = 60 -): VolatilityRegime[] { - // Calculate rolling volatility - const rollingVol: number[] = []; - const timestamps: Date[] = []; - - for (let i = windowSize - 1; i < returns.length; i++) { - const window = returns.slice(i - windowSize + 1, i + 1); - const mean = window.reduce((sum, r) => sum + r, 0) / window.length; - const variance = window.reduce((sum, r) => sum + Math.pow(r - mean, 2), 0) / (window.length - 1); - rollingVol.push(Math.sqrt(variance * 252)); // Annualized - timestamps.push(new Date(Date.now() + i * 24 * 60 * 60 * 1000)); // Mock timestamps - } - - // Simple k-means clustering on absolute returns - const absReturns = returns.map(ret => Math.abs(ret)); - const sortedReturns = [...absReturns].sort((a, b) => a - b); - - // Define regime thresholds - const thresholds: number[] = []; - for (let i = 1; i < numRegimes; i++) { - const index = Math.floor((i / 
numRegimes) * sortedReturns.length); - thresholds.push(sortedReturns[index]); - } - - // Classify returns into regimes - const regimeSequence = absReturns.map(absRet => { - for (let i = 0; i < thresholds.length; i++) { - if (absRet <= thresholds[i]) return i; - } - return numRegimes - 1; - }); - - // Calculate regime statistics - const regimes: VolatilityRegime[] = []; - for (let regime = 0; regime < numRegimes; regime++) { - const regimeIndices = regimeSequence - .map((r, idx) => r === regime ? idx : -1) - .filter(idx => idx !== -1); - - if (regimeIndices.length > 0) { - const regimeVolatilities = regimeIndices.map(idx => - idx < rollingVol.length ? rollingVol[idx] : 0 - ); - const avgVol = regimeVolatilities.reduce((sum, vol) => sum + vol, 0) / regimeVolatilities.length; - - regimes.push({ - regime, - startDate: new Date(Date.now()), - endDate: new Date(Date.now() + regimeIndices.length * 24 * 60 * 60 * 1000), - averageVolatility: avgVol, - observations: regimeIndices.length - }); - } - } - - return regimes; -} - -/** - * Calculate volatility term structure - */ -export function calculateVolatilityTermStructure( - spotVol: number, - maturities: number[], - meanReversion: number = 0.5 -): VolatilityTerm[] { - return maturities.map(maturity => { - // Simple mean reversion model for term structure - const timeToMaturity = maturity / 365; // Convert to years - const termVolatility = spotVol * Math.exp(-meanReversion * timeToMaturity); - - return { - maturity, - impliedVolatility: Math.max(termVolatility, 0.01), // Floor at 1% - confidence: Math.exp(-timeToMaturity) // Confidence decreases with maturity - }; - }); -} - -/** - * Calculate volatility smile/skew parameters - */ -export function calculateVolatilitySmile( - strikes: number[], - spotPrice: number, - impliedVols: number[] -): { - atmVolatility: number; - skew: number; - convexity: number; - riskReversal: number; -} { - if (strikes.length !== impliedVols.length || strikes.length < 3) { - throw new Error('Need 
at least 3 strikes with corresponding implied volatilities'); - } - - // Find ATM volatility - const atmIndex = strikes.reduce((closest, strike, idx) => - Math.abs(strike - spotPrice) < Math.abs(strikes[closest] - spotPrice) ? idx : closest, 0 - ); - const atmVolatility = impliedVols[atmIndex]; - - // Calculate skew (derivative at ATM) - let skew = 0; - if (atmIndex > 0 && atmIndex < strikes.length - 1) { - const deltaStrike = strikes[atmIndex + 1] - strikes[atmIndex - 1]; - const deltaVol = impliedVols[atmIndex + 1] - impliedVols[atmIndex - 1]; - skew = deltaVol / deltaStrike; - } - - // Calculate convexity (second derivative) - let convexity = 0; - if (atmIndex > 0 && atmIndex < strikes.length - 1) { - const h = strikes[atmIndex + 1] - strikes[atmIndex]; - convexity = (impliedVols[atmIndex + 1] - 2 * impliedVols[atmIndex] + impliedVols[atmIndex - 1]) / (h * h); - } - - // Risk reversal (put-call vol difference) - const otmPutIndex = strikes.findIndex(strike => strike < spotPrice * 0.9); - const otmCallIndex = strikes.findIndex(strike => strike > spotPrice * 1.1); - let riskReversal = 0; - - if (otmPutIndex !== -1 && otmCallIndex !== -1) { - riskReversal = impliedVols[otmCallIndex] - impliedVols[otmPutIndex]; - } - - return { - atmVolatility, - skew, - convexity, - riskReversal - }; -} - -/** - * Estimate Heston stochastic volatility model parameters - */ -export function estimateHestonParameters( - returns: number[], - maxIterations: number = 100 -): HestonParameters { - const n = returns.length; - - if (n < 10) { - throw new Error('Need at least 10 observations for Heston parameter estimation'); - } - - // Initial parameter estimates - let kappa = 2.0; // Mean reversion speed - let theta = 0.04; // Long-term variance - let sigma = 0.3; // Volatility of variance - let rho = -0.5; // Correlation - let v0 = 0.04; // Initial variance - - // Calculate sample statistics for initialization - const meanReturn = returns.reduce((sum, r) => sum + r, 0) / n; - const 
sampleVariance = returns.reduce((sum, r) => sum + Math.pow(r - meanReturn, 2), 0) / (n - 1); - - theta = sampleVariance; - v0 = sampleVariance; - - let logLikelihood = -Infinity; - - for (let iter = 0; iter < maxIterations; iter++) { - let newLogLikelihood = 0; - let currentVariance = v0; - - for (let t = 1; t < n; t++) { - const dt = 1.0; // Assuming daily data - const prevReturn = returns[t - 1]; - - // Euler discretization of variance process - const dW1 = Math.random() - 0.5; // Simplified random shock - const dW2 = rho * dW1 + Math.sqrt(1 - rho * rho) * (Math.random() - 0.5); - - const varianceChange = kappa * (theta - currentVariance) * dt + - sigma * Math.sqrt(Math.max(currentVariance, 0)) * dW2; - - currentVariance = Math.max(currentVariance + varianceChange, 0.001); - - // Log-likelihood contribution (simplified) - const expectedReturn = meanReturn; - const variance = currentVariance; - - if (variance > 0) { - newLogLikelihood -= 0.5 * Math.log(2 * Math.PI * variance); - newLogLikelihood -= 0.5 * Math.pow(returns[t] - expectedReturn, 2) / variance; - } - } - - // Check for convergence - if (Math.abs(newLogLikelihood - logLikelihood) < 1e-6) { - break; - } - - logLikelihood = newLogLikelihood; - - // Simple parameter updates (in practice, use maximum likelihood estimation) - const learningRate = 0.001; - kappa = Math.max(0.1, Math.min(10, kappa + learningRate)); - theta = Math.max(0.001, Math.min(1, theta + learningRate)); - sigma = Math.max(0.01, Math.min(2, sigma + learningRate)); - rho = Math.max(-0.99, Math.min(0.99, rho + learningRate * 0.1)); - v0 = Math.max(0.001, Math.min(1, v0 + learningRate)); - } - - return { - kappa, - theta, - sigma, - rho, - v0, - logLikelihood - }; -} - -/** - * Calculate volatility risk metrics - */ -export function calculateVolatilityRisk( - returns: number[], - confidenceLevel: number = 0.05 -): { - volatilityVaR: number; - expectedShortfall: number; - maxVolatility: number; - volatilityVolatility: number; -} { - // 
Calculate rolling volatilities - const windowSize = 30; - const volatilities: number[] = []; - - for (let i = windowSize - 1; i < returns.length; i++) { - const window = returns.slice(i - windowSize + 1, i + 1); - const mean = window.reduce((sum, r) => sum + r, 0) / window.length; - const variance = window.reduce((sum, r) => sum + Math.pow(r - mean, 2), 0) / (window.length - 1); - volatilities.push(Math.sqrt(variance * 252)); // Annualized - } - - // Sort volatilities for VaR calculation - const sortedVols = [...volatilities].sort((a, b) => b - a); // Descending order - const varIndex = Math.floor(confidenceLevel * sortedVols.length); - const volatilityVaR = sortedVols[varIndex]; - - // Expected shortfall (average of worst volatilities) - const esVols = sortedVols.slice(0, varIndex + 1); - const expectedShortfall = esVols.reduce((sum, vol) => sum + vol, 0) / esVols.length; - - // Maximum volatility - const maxVolatility = Math.max(...volatilities); - - // Volatility of volatility - const meanVol = volatilities.reduce((sum, vol) => sum + vol, 0) / volatilities.length; - const volVariance = volatilities.reduce((sum, vol) => sum + Math.pow(vol - meanVol, 2), 0) / (volatilities.length - 1); - const volatilityVolatility = Math.sqrt(volVariance); - - return { - volatilityVaR, - expectedShortfall, - maxVolatility, - volatilityVolatility - }; -} - -/** - * Fix Yang-Zhang volatility calculation - */ -export function calculateYangZhangVolatility( - ohlcv: OHLCVData[], - annualizationFactor: number = 252 -): number { - if (ohlcv.length < 2) { - throw new Error('Need at least 2 observations for Yang-Zhang volatility calculation'); - } - - const n = ohlcv.length; - let overnightSum = 0; - let openToCloseSum = 0; - let rogersSatchellSum = 0; - - for (let i = 1; i < n; i++) { - const prev = ohlcv[i - 1]; - const curr = ohlcv[i]; - - // Overnight return (close to open) - const overnight = Math.log(curr.open / prev.close); - overnightSum += overnight * overnight; - - // Open to 
close return - const openToClose = Math.log(curr.close / curr.open); - openToCloseSum += openToClose * openToClose; - - // Rogers-Satchell component - const logHighOpen = Math.log(curr.high / curr.open); - const logHighClose = Math.log(curr.high / curr.close); - const logLowOpen = Math.log(curr.low / curr.open); - const logLowClose = Math.log(curr.low / curr.close); - rogersSatchellSum += logHighOpen * logHighClose + logLowOpen * logLowClose; - } - - // Yang-Zhang estimator - const k = 0.34 / (1.34 + (n + 1) / (n - 1)); // Drift adjustment factor - const yangZhangVariance = overnightSum / (n - 1) + - k * openToCloseSum / (n - 1) + - (1 - k) * rogersSatchellSum / (n - 1); - - return Math.sqrt(yangZhangVariance * annualizationFactor); -} - -/** - * Parkinson volatility estimator - */ -export function parkinsonVolatility( - ohlcv: OHLCVData[], - annualizationFactor: number = 252 -): number { - if (ohlcv.length < 2) return 0; - const sum = ohlcv - .slice(1) - .reduce((acc, curr) => { - const range = Math.log(curr.high / curr.low); - return acc + range * range; - }, 0); - return Math.sqrt((sum / (ohlcv.length - 1)) * annualizationFactor); -} - -/** - * Black-Scholes option pricing model - */ -function blackScholes( - spotPrice: number, - strikePrice: number, - timeToExpiry: number, - volatility: number, - riskFreeRate: number, - optionType: 'call' | 'put' -): number { - const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate + 0.5 * volatility * volatility) * timeToExpiry) / (volatility * Math.sqrt(timeToExpiry)); - const d2 = d1 - volatility * Math.sqrt(timeToExpiry); - - if (optionType === 'call') { - return spotPrice * normalCDF(d1) - strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * normalCDF(d2); - } else { - return strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * normalCDF(-d2) - spotPrice * normalCDF(-d1); - } -} - -/** - * Normal cumulative distribution function - */ -function normalCDF(x: number): number { - const a1 = 0.254829592; - const a2 = 
-0.284496736; - const a3 = 1.421060743; - const a4 = -1.453152027; - const a5 = 1.061405429; - const p = 0.3275911; - - const sign = x < 0 ? -1 : 1; - const absX = Math.abs(x); - const t = 1 / (1 + p * absX); - const y = 1 - (a1 * t + a2 * t * t + a3 * t * t * t + a4 * t * t * t * t + a5 * t * t * t * t * t) * Math.exp(-absX * absX / 2); - - return 0.5 * (1 + sign * y); -} - -/** - * Forecast volatility using EWMA - */ -export function forecastVolatilityEWMA( - volatilities: number[], - lambda: number = 0.94, - forecastHorizon: number = 1 -): number { - if (volatilities.length === 0) { - return 0; - } - - let forecast = volatilities[volatilities.length - 1]; - for (let i = 0; i < forecastHorizon; i++) { - forecast = lambda * forecast + (1 - lambda) * forecast; // Using the last value as the long-term average - } - return forecast; +/** + * Volatility Models + * Advanced volatility modeling and forecasting tools + */ + +// Local interface definition to avoid circular dependency +interface OHLCVData { + open: number; + high: number; + low: number; + close: number; + volume: number; + timestamp: Date; +} + +export interface GARCHParameters { + omega: number; // Constant term + alpha: number; // ARCH parameter + beta: number; // GARCH parameter + logLikelihood: number; + aic: number; + bic: number; +} + +export interface VolatilityEstimates { + closeToClose: number; + parkinson: number; + garmanKlass: number; + rogersSatchell: number; + yangZhang: number; +} + +export interface VolatilityRegime { + regime: number; + startDate: Date; + endDate: Date; + averageVolatility: number; + observations: number; +} + +export interface VolatilityTerm { + maturity: number; // Days to maturity + impliedVolatility: number; + confidence: number; +} + +export interface HestonParameters { + kappa: number; // Mean reversion speed + theta: number; // Long-term variance + sigma: number; // Volatility of variance + rho: number; // Correlation + v0: number; // Initial variance + 
logLikelihood: number; +} + +/** + * Calculate realized volatility using different estimators + */ +export function calculateRealizedVolatility( + ohlcv: OHLCVData[], + annualizationFactor: number = 252 +): VolatilityEstimates { + if (ohlcv.length < 2) { + throw new Error('Need at least 2 observations for volatility calculation'); + } + + const n = ohlcv.length; + let closeToCloseSum = 0; + let parkinsonSum = 0; + let garmanKlassSum = 0; + let rogersSatchellSum = 0; + let yangZhangSum = 0; + + // Calculate log returns and volatility estimators + for (let i = 1; i < n; i++) { + const prev = ohlcv[i - 1]; + const curr = ohlcv[i]; + + // Close-to-close + const logReturn = Math.log(curr.close / prev.close); + closeToCloseSum += logReturn * logReturn; + + // Parkinson estimator + const logHighLow = Math.log(curr.high / curr.low); + parkinsonSum += logHighLow * logHighLow; + + // Garman-Klass estimator + const logOpenClose = Math.log(curr.close / curr.open); + garmanKlassSum += 0.5 * logHighLow * logHighLow - (2 * Math.log(2) - 1) * logOpenClose * logOpenClose; + + // Rogers-Satchell estimator + const logHighOpen = Math.log(curr.high / curr.open); + const logHighClose = Math.log(curr.high / curr.close); + const logLowOpen = Math.log(curr.low / curr.open); + const logLowClose = Math.log(curr.low / curr.close); + rogersSatchellSum += logHighOpen * logHighClose + logLowOpen * logLowClose; + + // Yang-Zhang estimator components + const overnight = Math.log(curr.open / prev.close); + yangZhangSum += overnight * overnight + rogersSatchellSum / i; // Simplified for brevity + } + + return { + closeToClose: Math.sqrt((closeToCloseSum / (n - 1)) * annualizationFactor), + parkinson: Math.sqrt((parkinsonSum / (n - 1) / (4 * Math.log(2))) * annualizationFactor), + garmanKlass: Math.sqrt((garmanKlassSum / (n - 1)) * annualizationFactor), + rogersSatchell: Math.sqrt((rogersSatchellSum / (n - 1)) * annualizationFactor), + yangZhang: Math.sqrt((yangZhangSum / (n - 1)) * 
annualizationFactor) + }; +} + +/** + * Estimate GARCH(1,1) model parameters + */ +export function estimateGARCH( + returns: number[], + maxIterations: number = 100, + tolerance: number = 1e-6 +): GARCHParameters { + const n = returns.length; + + // Initial parameter estimates + let omega = 0.01; + let alpha = 0.05; + let beta = 0.9; + + // Calculate unconditional variance + const meanReturn = returns.reduce((sum, r) => sum + r, 0) / n; + const unconditionalVar = returns.reduce((sum, r) => sum + Math.pow(r - meanReturn, 2), 0) / (n - 1); + + let logLikelihood = -Infinity; + + for (let iter = 0; iter < maxIterations; iter++) { + const variances: number[] = [unconditionalVar]; + let newLogLikelihood = 0; + + // Calculate conditional variances + for (let t = 1; t < n; t++) { + const prevVar = variances[t - 1]; + const prevReturn = returns[t - 1] - meanReturn; + const currentVar = omega + alpha * prevReturn * prevReturn + beta * prevVar; + variances.push(Math.max(currentVar, 1e-8)); // Ensure positive variance + + // Add to log-likelihood + const currentReturn = returns[t] - meanReturn; + newLogLikelihood -= 0.5 * (Math.log(2 * Math.PI) + Math.log(currentVar) + + (currentReturn * currentReturn) / currentVar); + } + + // Check for convergence + if (Math.abs(newLogLikelihood - logLikelihood) < tolerance) { + break; + } + + logLikelihood = newLogLikelihood; + + // Simple gradient update (in practice, use more sophisticated optimization) + const gradientStep = 0.001; + omega = Math.max(0.001, omega + gradientStep); + alpha = Math.max(0.001, Math.min(0.999, alpha + gradientStep)); + beta = Math.max(0.001, Math.min(0.999 - alpha, beta + gradientStep)); + } + + // Calculate information criteria + const k = 3; // Number of parameters + const aic = -2 * logLikelihood + 2 * k; + const bic = -2 * logLikelihood + k * Math.log(n); + + return { + omega, + alpha, + beta, + logLikelihood, + aic, + bic + }; +} + +/** + * Calculate EWMA volatility + */ +export function 
calculateEWMAVolatility( + returns: number[], + lambda: number = 0.94, + annualizationFactor: number = 252 +): number[] { + const n = returns.length; + const volatilities: number[] = []; + + // Initialize with sample variance + const meanReturn = returns.reduce((sum, r) => sum + r, 0) / n; + let variance = returns.reduce((sum, r) => sum + Math.pow(r - meanReturn, 2), 0) / (n - 1); + + for (let t = 0; t < n; t++) { + if (t > 0) { + const prevReturn = returns[t - 1] - meanReturn; + variance = lambda * variance + (1 - lambda) * prevReturn * prevReturn; + } + volatilities.push(Math.sqrt(variance * annualizationFactor)); + } + + return volatilities; +} + +/** + * Identify volatility regimes + */ +export function identifyVolatilityRegimes( + returns: number[], + numRegimes: number = 3, + windowSize: number = 60 +): VolatilityRegime[] { + // Calculate rolling volatility + const rollingVol: number[] = []; + const timestamps: Date[] = []; + + for (let i = windowSize - 1; i < returns.length; i++) { + const window = returns.slice(i - windowSize + 1, i + 1); + const mean = window.reduce((sum, r) => sum + r, 0) / window.length; + const variance = window.reduce((sum, r) => sum + Math.pow(r - mean, 2), 0) / (window.length - 1); + rollingVol.push(Math.sqrt(variance * 252)); // Annualized + timestamps.push(new Date(Date.now() + i * 24 * 60 * 60 * 1000)); // Mock timestamps + } + + // Simple k-means clustering on absolute returns + const absReturns = returns.map(ret => Math.abs(ret)); + const sortedReturns = [...absReturns].sort((a, b) => a - b); + + // Define regime thresholds + const thresholds: number[] = []; + for (let i = 1; i < numRegimes; i++) { + const index = Math.floor((i / numRegimes) * sortedReturns.length); + thresholds.push(sortedReturns[index]); + } + + // Classify returns into regimes + const regimeSequence = absReturns.map(absRet => { + for (let i = 0; i < thresholds.length; i++) { + if (absRet <= thresholds[i]) return i; + } + return numRegimes - 1; + }); + + // 
Calculate regime statistics + const regimes: VolatilityRegime[] = []; + for (let regime = 0; regime < numRegimes; regime++) { + const regimeIndices = regimeSequence + .map((r, idx) => r === regime ? idx : -1) + .filter(idx => idx !== -1); + + if (regimeIndices.length > 0) { + const regimeVolatilities = regimeIndices.map(idx => + idx < rollingVol.length ? rollingVol[idx] : 0 + ); + const avgVol = regimeVolatilities.reduce((sum, vol) => sum + vol, 0) / regimeVolatilities.length; + + regimes.push({ + regime, + startDate: new Date(Date.now()), + endDate: new Date(Date.now() + regimeIndices.length * 24 * 60 * 60 * 1000), + averageVolatility: avgVol, + observations: regimeIndices.length + }); + } + } + + return regimes; +} + +/** + * Calculate volatility term structure + */ +export function calculateVolatilityTermStructure( + spotVol: number, + maturities: number[], + meanReversion: number = 0.5 +): VolatilityTerm[] { + return maturities.map(maturity => { + // Simple mean reversion model for term structure + const timeToMaturity = maturity / 365; // Convert to years + const termVolatility = spotVol * Math.exp(-meanReversion * timeToMaturity); + + return { + maturity, + impliedVolatility: Math.max(termVolatility, 0.01), // Floor at 1% + confidence: Math.exp(-timeToMaturity) // Confidence decreases with maturity + }; + }); +} + +/** + * Calculate volatility smile/skew parameters + */ +export function calculateVolatilitySmile( + strikes: number[], + spotPrice: number, + impliedVols: number[] +): { + atmVolatility: number; + skew: number; + convexity: number; + riskReversal: number; +} { + if (strikes.length !== impliedVols.length || strikes.length < 3) { + throw new Error('Need at least 3 strikes with corresponding implied volatilities'); + } + + // Find ATM volatility + const atmIndex = strikes.reduce((closest, strike, idx) => + Math.abs(strike - spotPrice) < Math.abs(strikes[closest] - spotPrice) ? 
idx : closest, 0 + ); + const atmVolatility = impliedVols[atmIndex]; + + // Calculate skew (derivative at ATM) + let skew = 0; + if (atmIndex > 0 && atmIndex < strikes.length - 1) { + const deltaStrike = strikes[atmIndex + 1] - strikes[atmIndex - 1]; + const deltaVol = impliedVols[atmIndex + 1] - impliedVols[atmIndex - 1]; + skew = deltaVol / deltaStrike; + } + + // Calculate convexity (second derivative) + let convexity = 0; + if (atmIndex > 0 && atmIndex < strikes.length - 1) { + const h = strikes[atmIndex + 1] - strikes[atmIndex]; + convexity = (impliedVols[atmIndex + 1] - 2 * impliedVols[atmIndex] + impliedVols[atmIndex - 1]) / (h * h); + } + + // Risk reversal (put-call vol difference) + const otmPutIndex = strikes.findIndex(strike => strike < spotPrice * 0.9); + const otmCallIndex = strikes.findIndex(strike => strike > spotPrice * 1.1); + let riskReversal = 0; + + if (otmPutIndex !== -1 && otmCallIndex !== -1) { + riskReversal = impliedVols[otmCallIndex] - impliedVols[otmPutIndex]; + } + + return { + atmVolatility, + skew, + convexity, + riskReversal + }; +} + +/** + * Estimate Heston stochastic volatility model parameters + */ +export function estimateHestonParameters( + returns: number[], + maxIterations: number = 100 +): HestonParameters { + const n = returns.length; + + if (n < 10) { + throw new Error('Need at least 10 observations for Heston parameter estimation'); + } + + // Initial parameter estimates + let kappa = 2.0; // Mean reversion speed + let theta = 0.04; // Long-term variance + let sigma = 0.3; // Volatility of variance + let rho = -0.5; // Correlation + let v0 = 0.04; // Initial variance + + // Calculate sample statistics for initialization + const meanReturn = returns.reduce((sum, r) => sum + r, 0) / n; + const sampleVariance = returns.reduce((sum, r) => sum + Math.pow(r - meanReturn, 2), 0) / (n - 1); + + theta = sampleVariance; + v0 = sampleVariance; + + let logLikelihood = -Infinity; + + for (let iter = 0; iter < maxIterations; iter++) { 
+ let newLogLikelihood = 0; + let currentVariance = v0; + + for (let t = 1; t < n; t++) { + const dt = 1.0; // Assuming daily data + const prevReturn = returns[t - 1]; + + // Euler discretization of variance process + const dW1 = Math.random() - 0.5; // Simplified random shock + const dW2 = rho * dW1 + Math.sqrt(1 - rho * rho) * (Math.random() - 0.5); + + const varianceChange = kappa * (theta - currentVariance) * dt + + sigma * Math.sqrt(Math.max(currentVariance, 0)) * dW2; + + currentVariance = Math.max(currentVariance + varianceChange, 0.001); + + // Log-likelihood contribution (simplified) + const expectedReturn = meanReturn; + const variance = currentVariance; + + if (variance > 0) { + newLogLikelihood -= 0.5 * Math.log(2 * Math.PI * variance); + newLogLikelihood -= 0.5 * Math.pow(returns[t] - expectedReturn, 2) / variance; + } + } + + // Check for convergence + if (Math.abs(newLogLikelihood - logLikelihood) < 1e-6) { + break; + } + + logLikelihood = newLogLikelihood; + + // Simple parameter updates (in practice, use maximum likelihood estimation) + const learningRate = 0.001; + kappa = Math.max(0.1, Math.min(10, kappa + learningRate)); + theta = Math.max(0.001, Math.min(1, theta + learningRate)); + sigma = Math.max(0.01, Math.min(2, sigma + learningRate)); + rho = Math.max(-0.99, Math.min(0.99, rho + learningRate * 0.1)); + v0 = Math.max(0.001, Math.min(1, v0 + learningRate)); + } + + return { + kappa, + theta, + sigma, + rho, + v0, + logLikelihood + }; +} + +/** + * Calculate volatility risk metrics + */ +export function calculateVolatilityRisk( + returns: number[], + confidenceLevel: number = 0.05 +): { + volatilityVaR: number; + expectedShortfall: number; + maxVolatility: number; + volatilityVolatility: number; +} { + // Calculate rolling volatilities + const windowSize = 30; + const volatilities: number[] = []; + + for (let i = windowSize - 1; i < returns.length; i++) { + const window = returns.slice(i - windowSize + 1, i + 1); + const mean = 
window.reduce((sum, r) => sum + r, 0) / window.length; + const variance = window.reduce((sum, r) => sum + Math.pow(r - mean, 2), 0) / (window.length - 1); + volatilities.push(Math.sqrt(variance * 252)); // Annualized + } + + // Sort volatilities for VaR calculation + const sortedVols = [...volatilities].sort((a, b) => b - a); // Descending order + const varIndex = Math.floor(confidenceLevel * sortedVols.length); + const volatilityVaR = sortedVols[varIndex]; + + // Expected shortfall (average of worst volatilities) + const esVols = sortedVols.slice(0, varIndex + 1); + const expectedShortfall = esVols.reduce((sum, vol) => sum + vol, 0) / esVols.length; + + // Maximum volatility + const maxVolatility = Math.max(...volatilities); + + // Volatility of volatility + const meanVol = volatilities.reduce((sum, vol) => sum + vol, 0) / volatilities.length; + const volVariance = volatilities.reduce((sum, vol) => sum + Math.pow(vol - meanVol, 2), 0) / (volatilities.length - 1); + const volatilityVolatility = Math.sqrt(volVariance); + + return { + volatilityVaR, + expectedShortfall, + maxVolatility, + volatilityVolatility + }; +} + +/** + * Fix Yang-Zhang volatility calculation + */ +export function calculateYangZhangVolatility( + ohlcv: OHLCVData[], + annualizationFactor: number = 252 +): number { + if (ohlcv.length < 2) { + throw new Error('Need at least 2 observations for Yang-Zhang volatility calculation'); + } + + const n = ohlcv.length; + let overnightSum = 0; + let openToCloseSum = 0; + let rogersSatchellSum = 0; + + for (let i = 1; i < n; i++) { + const prev = ohlcv[i - 1]; + const curr = ohlcv[i]; + + // Overnight return (close to open) + const overnight = Math.log(curr.open / prev.close); + overnightSum += overnight * overnight; + + // Open to close return + const openToClose = Math.log(curr.close / curr.open); + openToCloseSum += openToClose * openToClose; + + // Rogers-Satchell component + const logHighOpen = Math.log(curr.high / curr.open); + const logHighClose = 
Math.log(curr.high / curr.close); + const logLowOpen = Math.log(curr.low / curr.open); + const logLowClose = Math.log(curr.low / curr.close); + rogersSatchellSum += logHighOpen * logHighClose + logLowOpen * logLowClose; + } + + // Yang-Zhang estimator + const k = 0.34 / (1.34 + (n + 1) / (n - 1)); // Drift adjustment factor + const yangZhangVariance = overnightSum / (n - 1) + + k * openToCloseSum / (n - 1) + + (1 - k) * rogersSatchellSum / (n - 1); + + return Math.sqrt(yangZhangVariance * annualizationFactor); +} + +/** + * Parkinson volatility estimator + */ +export function parkinsonVolatility( + ohlcv: OHLCVData[], + annualizationFactor: number = 252 +): number { + if (ohlcv.length < 2) return 0; + const sum = ohlcv + .slice(1) + .reduce((acc, curr) => { + const range = Math.log(curr.high / curr.low); + return acc + range * range; + }, 0); + return Math.sqrt((sum / (ohlcv.length - 1)) * annualizationFactor); +} + +/** + * Black-Scholes option pricing model + */ +function blackScholes( + spotPrice: number, + strikePrice: number, + timeToExpiry: number, + volatility: number, + riskFreeRate: number, + optionType: 'call' | 'put' +): number { + const d1 = (Math.log(spotPrice / strikePrice) + (riskFreeRate + 0.5 * volatility * volatility) * timeToExpiry) / (volatility * Math.sqrt(timeToExpiry)); + const d2 = d1 - volatility * Math.sqrt(timeToExpiry); + + if (optionType === 'call') { + return spotPrice * normalCDF(d1) - strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * normalCDF(d2); + } else { + return strikePrice * Math.exp(-riskFreeRate * timeToExpiry) * normalCDF(-d2) - spotPrice * normalCDF(-d1); + } +} + +/** + * Normal cumulative distribution function + */ +function normalCDF(x: number): number { + const a1 = 0.254829592; + const a2 = -0.284496736; + const a3 = 1.421060743; + const a4 = -1.453152027; + const a5 = 1.061405429; + const p = 0.3275911; + + const sign = x < 0 ? 
-1 : 1; + const absX = Math.abs(x); + const t = 1 / (1 + p * absX); + const y = 1 - (a1 * t + a2 * t * t + a3 * t * t * t + a4 * t * t * t * t + a5 * t * t * t * t * t) * Math.exp(-absX * absX / 2); + + return 0.5 * (1 + sign * y); +} + +/** + * Forecast volatility using EWMA + */ +export function forecastVolatilityEWMA( + volatilities: number[], + lambda: number = 0.94, + forecastHorizon: number = 1 +): number { + if (volatilities.length === 0) { + return 0; + } + + let forecast = volatilities[volatilities.length - 1]; + for (let i = 0; i < forecastHorizon; i++) { + forecast = lambda * forecast + (1 - lambda) * forecast; // Using the last value as the long-term average + } + return forecast; } \ No newline at end of file diff --git a/libs/utils/src/dateUtils.ts b/libs/utils/src/dateUtils.ts index e1031db..97b8d9c 100644 --- a/libs/utils/src/dateUtils.ts +++ b/libs/utils/src/dateUtils.ts @@ -1,55 +1,55 @@ -/** - * Date and time utilities for working with market data - */ -export const dateUtils = { - /** - * Check if a date is a trading day (Monday-Friday, non-holiday) - * This is a simplified implementation - a real version would check market holidays - */ - isTradingDay(date: Date): boolean { - const day = date.getDay(); - return day > 0 && day < 6; // Mon-Fri - }, - - /** - * Get the next trading day from a given date - */ - getNextTradingDay(date: Date): Date { - const nextDay = new Date(date); - nextDay.setDate(nextDay.getDate() + 1); - - while (!this.isTradingDay(nextDay)) { - nextDay.setDate(nextDay.getDate() + 1); - } - - return nextDay; - }, - - /** - * Get the previous trading day from a given date - */ - getPreviousTradingDay(date: Date): Date { - const prevDay = new Date(date); - prevDay.setDate(prevDay.getDate() - 1); - - while (!this.isTradingDay(prevDay)) { - prevDay.setDate(prevDay.getDate() - 1); - } - - return prevDay; - }, - - /** - * Format a date as YYYY-MM-DD - */ - formatDate(date: Date): string { - return date.toISOString().split('T')[0]; - 
}, - - /** - * Parse a date string in YYYY-MM-DD format - */ - parseDate(dateStr: string): Date { - return new Date(dateStr); - } -}; +/** + * Date and time utilities for working with market data + */ +export const dateUtils = { + /** + * Check if a date is a trading day (Monday-Friday, non-holiday) + * This is a simplified implementation - a real version would check market holidays + */ + isTradingDay(date: Date): boolean { + const day = date.getDay(); + return day > 0 && day < 6; // Mon-Fri + }, + + /** + * Get the next trading day from a given date + */ + getNextTradingDay(date: Date): Date { + const nextDay = new Date(date); + nextDay.setDate(nextDay.getDate() + 1); + + while (!this.isTradingDay(nextDay)) { + nextDay.setDate(nextDay.getDate() + 1); + } + + return nextDay; + }, + + /** + * Get the previous trading day from a given date + */ + getPreviousTradingDay(date: Date): Date { + const prevDay = new Date(date); + prevDay.setDate(prevDay.getDate() - 1); + + while (!this.isTradingDay(prevDay)) { + prevDay.setDate(prevDay.getDate() - 1); + } + + return prevDay; + }, + + /** + * Format a date as YYYY-MM-DD + */ + formatDate(date: Date): string { + return date.toISOString().split('T')[0]; + }, + + /** + * Parse a date string in YYYY-MM-DD format + */ + parseDate(dateStr: string): Date { + return new Date(dateStr); + } +}; diff --git a/libs/utils/src/index.ts b/libs/utils/src/index.ts index b06b8fb..6a01eb3 100644 --- a/libs/utils/src/index.ts +++ b/libs/utils/src/index.ts @@ -1,2 +1,2 @@ -export * from './dateUtils'; +export * from './dateUtils'; export * from './calculations/index'; \ No newline at end of file diff --git a/libs/utils/test/calculations/position-sizing.test.ts b/libs/utils/test/calculations/position-sizing.test.ts index 3500285..833db6d 100644 --- a/libs/utils/test/calculations/position-sizing.test.ts +++ b/libs/utils/test/calculations/position-sizing.test.ts @@ -1,403 +1,403 @@ -/** - * Test suite for position sizing calculations - */ -import { 
describe, it, expect } from 'bun:test'; -import { - fixedRiskPositionSize, - kellyPositionSize, - fractionalKellyPositionSize, - volatilityTargetPositionSize, - equalWeightPositionSize, - atrBasedPositionSize, - expectancyPositionSize, - monteCarloPositionSize, - sharpeOptimizedPositionSize, - fixedFractionalPositionSize, - volatilityAdjustedPositionSize, - correlationAdjustedPositionSize, - calculatePortfolioHeat, - dynamicPositionSize, - liquidityConstrainedPositionSize, - multiTimeframePositionSize, - riskParityPositionSize, - validatePositionSize, - type PositionSizeParams, - type KellyParams, - type VolatilityParams -} from '../../src/calculations/position-sizing'; - -describe('Position Sizing Calculations', () => { - describe('fixedRiskPositionSize', () => { - it('should calculate correct position size for long position', () => { - const params: PositionSizeParams = { - accountSize: 100000, - riskPercentage: 2, - entryPrice: 100, - stopLoss: 95, - leverage: 1 - }; - - const result = fixedRiskPositionSize(params); - // Risk amount: 100000 * 0.02 = 2000 - // Risk per share: 100 - 95 = 5 - // Position size: 2000 / 5 = 400 shares - expect(result).toBe(400); - }); - - it('should calculate correct position size for short position', () => { - const params: PositionSizeParams = { - accountSize: 100000, - riskPercentage: 2, - entryPrice: 100, - stopLoss: 105, - leverage: 1 - }; - - const result = fixedRiskPositionSize(params); - // Risk per share: |100 - 105| = 5 - // Position size: 2000 / 5 = 400 shares - expect(result).toBe(400); - }); - - it('should return 0 for invalid inputs', () => { - const params: PositionSizeParams = { - accountSize: 0, - riskPercentage: 2, - entryPrice: 100, - stopLoss: 95 - }; - - expect(fixedRiskPositionSize(params)).toBe(0); - }); - - it('should return 0 when entry price equals stop loss', () => { - const params: PositionSizeParams = { - accountSize: 100000, - riskPercentage: 2, - entryPrice: 100, - stopLoss: 100 - }; - - 
expect(fixedRiskPositionSize(params)).toBe(0); - }); - }); - - describe('kellyPositionSize', () => { - it('should calculate correct Kelly position size', () => { - const params: KellyParams = { - winRate: 0.6, - averageWin: 150, - averageLoss: -100 - }; - - const result = kellyPositionSize(params, 100000); - - // Kelly formula: f = (bp - q) / b - // b = 150/100 = 1.5, p = 0.6, q = 0.4 - // f = (1.5 * 0.6 - 0.4) / 1.5 = (0.9 - 0.4) / 1.5 = 0.5 / 1.5 = 0.333 - // With safety factor of 0.25: 0.333 * 0.25 = 0.083 - // Capped at 0.25, so result should be 0.083 - // Position: 100000 * 0.083 = 8300 - expect(result).toBeCloseTo(8333, 0); - }); - - it('should return 0 for negative expectancy', () => { - const params: KellyParams = { - winRate: 0.3, - averageWin: 100, - averageLoss: -200 - }; - - const result = kellyPositionSize(params, 100000); - expect(result).toBe(0); - }); - - it('should return 0 for invalid inputs', () => { - const params: KellyParams = { - winRate: 0, - averageWin: 100, - averageLoss: -100 - }; - - expect(kellyPositionSize(params, 100000)).toBe(0); - }); - }); - - describe('volatilityTargetPositionSize', () => { - it('should calculate correct volatility-targeted position size', () => { - const params: VolatilityParams = { - price: 100, - volatility: 0.20, - targetVolatility: 0.10, - lookbackDays: 30 - }; - - const result = volatilityTargetPositionSize(params, 100000); - - // Volatility ratio: 0.10 / 0.20 = 0.5 - // Position value: 100000 * 0.5 = 50000 - // Position size: 50000 / 100 = 500 shares - expect(result).toBe(500); - }); - - it('should cap leverage at 2x', () => { - const params: VolatilityParams = { - price: 100, - volatility: 0.05, - targetVolatility: 0.20, - lookbackDays: 30 - }; - - const result = volatilityTargetPositionSize(params, 100000); - - // Volatility ratio would be 4, but capped at 2 - // Position value: 100000 * 2 = 200000 - // Position size: 200000 / 100 = 2000 shares - expect(result).toBe(2000); - }); - }); - - 
describe('equalWeightPositionSize', () => { - it('should calculate equal weight position size', () => { - const result = equalWeightPositionSize(100000, 5, 100); - - // Position value per asset: 100000 / 5 = 20000 - // Position size: 20000 / 100 = 200 shares - expect(result).toBe(200); - }); - - it('should return 0 for invalid inputs', () => { - expect(equalWeightPositionSize(100000, 0, 100)).toBe(0); - expect(equalWeightPositionSize(100000, 5, 0)).toBe(0); - }); - }); - - describe('atrBasedPositionSize', () => { - it('should calculate ATR-based position size', () => { - const result = atrBasedPositionSize(100000, 2, 5, 2, 100); - - // Risk amount: 100000 * 0.02 = 2000 - // Stop distance: 5 * 2 = 10 - // Position size: 2000 / 10 = 200 shares - expect(result).toBe(200); - }); - - it('should return 0 for zero ATR', () => { - const result = atrBasedPositionSize(100000, 2, 0, 2, 100); - expect(result).toBe(0); - }); - }); - - describe('expectancyPositionSize', () => { - it('should calculate expectancy-based position size', () => { - const result = expectancyPositionSize(100000, 0.6, 150, -100, 5); - - // Expectancy: 0.6 * 150 - 0.4 * 100 = 90 - 40 = 50 - // Expectancy ratio: 50 / 100 = 0.5 - // Risk percentage: min(0.5 * 0.5, 5) = min(0.25, 5) = 0.25 - // Position: 100000 * 0.0025 = 250 - expect(result).toBe(250); - }); - - it('should return 0 for negative expectancy', () => { - const result = expectancyPositionSize(100000, 0.3, 100, -200); - expect(result).toBe(0); - }); - }); - - describe('correlationAdjustedPositionSize', () => { - it('should adjust position size based on correlation', () => { - const existingPositions = [ - { size: 1000, correlation: 0.5 }, - { size: 500, correlation: 0.3 } - ]; - - const result = correlationAdjustedPositionSize(1000, existingPositions, 0.5); - - // Should reduce position size based on correlation risk - expect(result).toBeLessThan(1000); - expect(result).toBeGreaterThan(0); - }); - - it('should return original size when no 
existing positions', () => { - const result = correlationAdjustedPositionSize(1000, [], 0.5); - expect(result).toBe(1000); - }); - }); - - describe('calculatePortfolioHeat', () => { - it('should calculate portfolio heat correctly', () => { - const positions = [ - { value: 10000, risk: 500 }, - { value: 15000, risk: 750 }, - { value: 20000, risk: 1000 } - ]; - - const result = calculatePortfolioHeat(positions, 100000); - - // Total risk: 500 + 750 + 1000 = 2250 - // Heat: (2250 / 100000) * 100 = 2.25% - expect(result).toBe(2.25); - }); - - it('should handle empty positions array', () => { - const result = calculatePortfolioHeat([], 100000); - expect(result).toBe(0); - }); - - it('should cap heat at 100%', () => { - const positions = [ - { value: 50000, risk: 150000 } - ]; - - const result = calculatePortfolioHeat(positions, 100000); - expect(result).toBe(100); - }); - }); - - describe('dynamicPositionSize', () => { - it('should adjust position size based on market conditions', () => { - const result = dynamicPositionSize(1000, 0.25, 0.15, 0.05, 0.10); - - // Volatility adjustment: 0.15 / 0.25 = 0.6 - // Drawdown adjustment: 1 - (0.05 / 0.10) = 0.5 - // Adjusted size: 1000 * 0.6 * 0.5 = 300 - expect(result).toBe(300); - }); - - it('should handle high drawdown', () => { - const result = dynamicPositionSize(1000, 0.20, 0.15, 0.15, 0.10); - - // Should significantly reduce position size due to high drawdown - expect(result).toBeLessThan(500); - }); - }); - - describe('liquidityConstrainedPositionSize', () => { - it('should constrain position size based on liquidity', () => { - const result = liquidityConstrainedPositionSize(1000, 10000, 0.05, 100); - - // Max shares: 10000 * 0.05 = 500 - // Should return min(1000, 500) = 500 - expect(result).toBe(500); - }); - - it('should return desired size when liquidity allows', () => { - const result = liquidityConstrainedPositionSize(500, 20000, 0.05, 100); - - // Max shares: 20000 * 0.05 = 1000 - // Should return min(500, 1000) = 
500 - expect(result).toBe(500); - }); - }); - - describe('multiTimeframePositionSize', () => { - it('should weight signals correctly', () => { - const result = multiTimeframePositionSize(100000, 0.8, 0.6, 0.4, 2); - - // Weighted signal: 0.8 * 0.2 + 0.6 * 0.3 + 0.4 * 0.5 = 0.16 + 0.18 + 0.2 = 0.54 - // Adjusted risk: 2 * 0.54 = 1.08% - // Position: 100000 * 0.0108 = 1080 - expect(result).toBe(1080); - }); - - it('should clamp signals to valid range', () => { - const result = multiTimeframePositionSize(100000, 2, -2, 1.5, 2); - - // Signals should be clamped to [-1, 1] - // Weighted: 1 * 0.2 + (-1) * 0.3 + 1 * 0.5 = 0.2 - 0.3 + 0.5 = 0.4 - // Adjusted risk: 2 * 0.4 = 0.8% - expect(result).toBe(800); - }); - }); - - describe('riskParityPositionSize', () => { - it('should allocate based on inverse volatility', () => { - const assets = [ - { volatility: 0.10, price: 100 }, - { volatility: 0.20, price: 200 } - ]; - - const result = riskParityPositionSize(assets, 0.15, 100000); - - // Asset 1: 1/0.10 = 10, Asset 2: 1/0.20 = 5 - // Total inverse vol: 15 - // Weights: Asset 1: 10/15 = 0.667, Asset 2: 5/15 = 0.333 - expect(result).toHaveLength(2); - expect(result[0]).toBeGreaterThan(result[1]); - }); - - it('should handle zero volatility assets', () => { - const assets = [ - { volatility: 0, price: 100 }, - { volatility: 0.20, price: 200 } - ]; - - const result = riskParityPositionSize(assets, 0.15, 100000); - - expect(result[0]).toBe(0); - expect(result[1]).toBeGreaterThan(0); - }); - }); - - describe('sharpeOptimizedPositionSize', () => { - it('should calculate position size based on Sharpe optimization', () => { - const result = sharpeOptimizedPositionSize(100000, 0.15, 0.20, 0.02, 3); - - // Kelly formula for continuous returns: f = (ΞΌ - r) / σ² - // Expected return: 0.15, Risk-free: 0.02, Volatility: 0.20 - // f = (0.15 - 0.02) / (0.20)Β² = 0.13 / 0.04 = 3.25 - // But capped at maxLeverage=3, so should be 3.0 - // Final position: 100000 * 3 = 300000 - 
expect(result).toBe(300000); - }); - - it('should return 0 for invalid inputs', () => { - // Invalid volatility - expect(sharpeOptimizedPositionSize(100000, 0.15, 0, 0.02)).toBe(0); - - // Invalid account size - expect(sharpeOptimizedPositionSize(0, 0.15, 0.20, 0.02)).toBe(0); - - // Expected return less than risk-free rate - expect(sharpeOptimizedPositionSize(100000, 0.01, 0.20, 0.02)).toBe(0); - }); - - it('should respect maximum leverage', () => { - const result = sharpeOptimizedPositionSize(100000, 0.30, 0.20, 0.02, 2); - - // Kelly fraction would be (0.30 - 0.02) / (0.20)Β² = 7, but capped at 2 - // Position: 100000 * 2 = 200000 - expect(result).toBe(200000); - }); - }); - - describe('validatePositionSize', () => { - it('should validate position size against limits', () => { - const result = validatePositionSize(500, 100, 100000, 10, 2); - - // Position value: 500 * 100 = 50000 (50% of account) - // This exceeds 10% limit - expect(result.isValid).toBe(false); - expect(result.violations).toContain('Position exceeds maximum 10% of account'); - expect(result.adjustedSize).toBe(100); // 10000 / 100 - }); - - it('should pass validation for reasonable position', () => { - const result = validatePositionSize(50, 100, 100000, 10, 2); - - // Position value: 50 * 100 = 5000 (5% of account) - expect(result.isValid).toBe(true); - expect(result.violations).toHaveLength(0); - expect(result.adjustedSize).toBe(50); - }); - - it('should handle fractional shares', () => { - const result = validatePositionSize(0.5, 100, 100000, 10, 2); - - expect(result.isValid).toBe(false); - expect(result.violations).toContain('Position size too small (less than 1 share)'); - expect(result.adjustedSize).toBe(0); - }); - }); -}); +/** + * Test suite for position sizing calculations + */ +import { describe, it, expect } from 'bun:test'; +import { + fixedRiskPositionSize, + kellyPositionSize, + fractionalKellyPositionSize, + volatilityTargetPositionSize, + equalWeightPositionSize, + 
atrBasedPositionSize, + expectancyPositionSize, + monteCarloPositionSize, + sharpeOptimizedPositionSize, + fixedFractionalPositionSize, + volatilityAdjustedPositionSize, + correlationAdjustedPositionSize, + calculatePortfolioHeat, + dynamicPositionSize, + liquidityConstrainedPositionSize, + multiTimeframePositionSize, + riskParityPositionSize, + validatePositionSize, + type PositionSizeParams, + type KellyParams, + type VolatilityParams +} from '../../src/calculations/position-sizing'; + +describe('Position Sizing Calculations', () => { + describe('fixedRiskPositionSize', () => { + it('should calculate correct position size for long position', () => { + const params: PositionSizeParams = { + accountSize: 100000, + riskPercentage: 2, + entryPrice: 100, + stopLoss: 95, + leverage: 1 + }; + + const result = fixedRiskPositionSize(params); + // Risk amount: 100000 * 0.02 = 2000 + // Risk per share: 100 - 95 = 5 + // Position size: 2000 / 5 = 400 shares + expect(result).toBe(400); + }); + + it('should calculate correct position size for short position', () => { + const params: PositionSizeParams = { + accountSize: 100000, + riskPercentage: 2, + entryPrice: 100, + stopLoss: 105, + leverage: 1 + }; + + const result = fixedRiskPositionSize(params); + // Risk per share: |100 - 105| = 5 + // Position size: 2000 / 5 = 400 shares + expect(result).toBe(400); + }); + + it('should return 0 for invalid inputs', () => { + const params: PositionSizeParams = { + accountSize: 0, + riskPercentage: 2, + entryPrice: 100, + stopLoss: 95 + }; + + expect(fixedRiskPositionSize(params)).toBe(0); + }); + + it('should return 0 when entry price equals stop loss', () => { + const params: PositionSizeParams = { + accountSize: 100000, + riskPercentage: 2, + entryPrice: 100, + stopLoss: 100 + }; + + expect(fixedRiskPositionSize(params)).toBe(0); + }); + }); + + describe('kellyPositionSize', () => { + it('should calculate correct Kelly position size', () => { + const params: KellyParams = { + winRate: 
0.6, + averageWin: 150, + averageLoss: -100 + }; + + const result = kellyPositionSize(params, 100000); + + // Kelly formula: f = (bp - q) / b + // b = 150/100 = 1.5, p = 0.6, q = 0.4 + // f = (1.5 * 0.6 - 0.4) / 1.5 = (0.9 - 0.4) / 1.5 = 0.5 / 1.5 = 0.333 + // With safety factor of 0.25: 0.333 * 0.25 = 0.083 + // Capped at 0.25, so result should be 0.083 + // Position: 100000 * 0.083 = 8300 + expect(result).toBeCloseTo(8333, 0); + }); + + it('should return 0 for negative expectancy', () => { + const params: KellyParams = { + winRate: 0.3, + averageWin: 100, + averageLoss: -200 + }; + + const result = kellyPositionSize(params, 100000); + expect(result).toBe(0); + }); + + it('should return 0 for invalid inputs', () => { + const params: KellyParams = { + winRate: 0, + averageWin: 100, + averageLoss: -100 + }; + + expect(kellyPositionSize(params, 100000)).toBe(0); + }); + }); + + describe('volatilityTargetPositionSize', () => { + it('should calculate correct volatility-targeted position size', () => { + const params: VolatilityParams = { + price: 100, + volatility: 0.20, + targetVolatility: 0.10, + lookbackDays: 30 + }; + + const result = volatilityTargetPositionSize(params, 100000); + + // Volatility ratio: 0.10 / 0.20 = 0.5 + // Position value: 100000 * 0.5 = 50000 + // Position size: 50000 / 100 = 500 shares + expect(result).toBe(500); + }); + + it('should cap leverage at 2x', () => { + const params: VolatilityParams = { + price: 100, + volatility: 0.05, + targetVolatility: 0.20, + lookbackDays: 30 + }; + + const result = volatilityTargetPositionSize(params, 100000); + + // Volatility ratio would be 4, but capped at 2 + // Position value: 100000 * 2 = 200000 + // Position size: 200000 / 100 = 2000 shares + expect(result).toBe(2000); + }); + }); + + describe('equalWeightPositionSize', () => { + it('should calculate equal weight position size', () => { + const result = equalWeightPositionSize(100000, 5, 100); + + // Position value per asset: 100000 / 5 = 20000 + // 
Position size: 20000 / 100 = 200 shares + expect(result).toBe(200); + }); + + it('should return 0 for invalid inputs', () => { + expect(equalWeightPositionSize(100000, 0, 100)).toBe(0); + expect(equalWeightPositionSize(100000, 5, 0)).toBe(0); + }); + }); + + describe('atrBasedPositionSize', () => { + it('should calculate ATR-based position size', () => { + const result = atrBasedPositionSize(100000, 2, 5, 2, 100); + + // Risk amount: 100000 * 0.02 = 2000 + // Stop distance: 5 * 2 = 10 + // Position size: 2000 / 10 = 200 shares + expect(result).toBe(200); + }); + + it('should return 0 for zero ATR', () => { + const result = atrBasedPositionSize(100000, 2, 0, 2, 100); + expect(result).toBe(0); + }); + }); + + describe('expectancyPositionSize', () => { + it('should calculate expectancy-based position size', () => { + const result = expectancyPositionSize(100000, 0.6, 150, -100, 5); + + // Expectancy: 0.6 * 150 - 0.4 * 100 = 90 - 40 = 50 + // Expectancy ratio: 50 / 100 = 0.5 + // Risk percentage: min(0.5 * 0.5, 5) = min(0.25, 5) = 0.25 + // Position: 100000 * 0.0025 = 250 + expect(result).toBe(250); + }); + + it('should return 0 for negative expectancy', () => { + const result = expectancyPositionSize(100000, 0.3, 100, -200); + expect(result).toBe(0); + }); + }); + + describe('correlationAdjustedPositionSize', () => { + it('should adjust position size based on correlation', () => { + const existingPositions = [ + { size: 1000, correlation: 0.5 }, + { size: 500, correlation: 0.3 } + ]; + + const result = correlationAdjustedPositionSize(1000, existingPositions, 0.5); + + // Should reduce position size based on correlation risk + expect(result).toBeLessThan(1000); + expect(result).toBeGreaterThan(0); + }); + + it('should return original size when no existing positions', () => { + const result = correlationAdjustedPositionSize(1000, [], 0.5); + expect(result).toBe(1000); + }); + }); + + describe('calculatePortfolioHeat', () => { + it('should calculate portfolio heat 
correctly', () => { + const positions = [ + { value: 10000, risk: 500 }, + { value: 15000, risk: 750 }, + { value: 20000, risk: 1000 } + ]; + + const result = calculatePortfolioHeat(positions, 100000); + + // Total risk: 500 + 750 + 1000 = 2250 + // Heat: (2250 / 100000) * 100 = 2.25% + expect(result).toBe(2.25); + }); + + it('should handle empty positions array', () => { + const result = calculatePortfolioHeat([], 100000); + expect(result).toBe(0); + }); + + it('should cap heat at 100%', () => { + const positions = [ + { value: 50000, risk: 150000 } + ]; + + const result = calculatePortfolioHeat(positions, 100000); + expect(result).toBe(100); + }); + }); + + describe('dynamicPositionSize', () => { + it('should adjust position size based on market conditions', () => { + const result = dynamicPositionSize(1000, 0.25, 0.15, 0.05, 0.10); + + // Volatility adjustment: 0.15 / 0.25 = 0.6 + // Drawdown adjustment: 1 - (0.05 / 0.10) = 0.5 + // Adjusted size: 1000 * 0.6 * 0.5 = 300 + expect(result).toBe(300); + }); + + it('should handle high drawdown', () => { + const result = dynamicPositionSize(1000, 0.20, 0.15, 0.15, 0.10); + + // Should significantly reduce position size due to high drawdown + expect(result).toBeLessThan(500); + }); + }); + + describe('liquidityConstrainedPositionSize', () => { + it('should constrain position size based on liquidity', () => { + const result = liquidityConstrainedPositionSize(1000, 10000, 0.05, 100); + + // Max shares: 10000 * 0.05 = 500 + // Should return min(1000, 500) = 500 + expect(result).toBe(500); + }); + + it('should return desired size when liquidity allows', () => { + const result = liquidityConstrainedPositionSize(500, 20000, 0.05, 100); + + // Max shares: 20000 * 0.05 = 1000 + // Should return min(500, 1000) = 500 + expect(result).toBe(500); + }); + }); + + describe('multiTimeframePositionSize', () => { + it('should weight signals correctly', () => { + const result = multiTimeframePositionSize(100000, 0.8, 0.6, 0.4, 2); + + 
// Weighted signal: 0.8 * 0.2 + 0.6 * 0.3 + 0.4 * 0.5 = 0.16 + 0.18 + 0.2 = 0.54 + // Adjusted risk: 2 * 0.54 = 1.08% + // Position: 100000 * 0.0108 = 1080 + expect(result).toBe(1080); + }); + + it('should clamp signals to valid range', () => { + const result = multiTimeframePositionSize(100000, 2, -2, 1.5, 2); + + // Signals should be clamped to [-1, 1] + // Weighted: 1 * 0.2 + (-1) * 0.3 + 1 * 0.5 = 0.2 - 0.3 + 0.5 = 0.4 + // Adjusted risk: 2 * 0.4 = 0.8% + expect(result).toBe(800); + }); + }); + + describe('riskParityPositionSize', () => { + it('should allocate based on inverse volatility', () => { + const assets = [ + { volatility: 0.10, price: 100 }, + { volatility: 0.20, price: 200 } + ]; + + const result = riskParityPositionSize(assets, 0.15, 100000); + + // Asset 1: 1/0.10 = 10, Asset 2: 1/0.20 = 5 + // Total inverse vol: 15 + // Weights: Asset 1: 10/15 = 0.667, Asset 2: 5/15 = 0.333 + expect(result).toHaveLength(2); + expect(result[0]).toBeGreaterThan(result[1]); + }); + + it('should handle zero volatility assets', () => { + const assets = [ + { volatility: 0, price: 100 }, + { volatility: 0.20, price: 200 } + ]; + + const result = riskParityPositionSize(assets, 0.15, 100000); + + expect(result[0]).toBe(0); + expect(result[1]).toBeGreaterThan(0); + }); + }); + + describe('sharpeOptimizedPositionSize', () => { + it('should calculate position size based on Sharpe optimization', () => { + const result = sharpeOptimizedPositionSize(100000, 0.15, 0.20, 0.02, 3); + + // Kelly formula for continuous returns: f = (ΞΌ - r) / σ² + // Expected return: 0.15, Risk-free: 0.02, Volatility: 0.20 + // f = (0.15 - 0.02) / (0.20)Β² = 0.13 / 0.04 = 3.25 + // But capped at maxLeverage=3, so should be 3.0 + // Final position: 100000 * 3 = 300000 + expect(result).toBe(300000); + }); + + it('should return 0 for invalid inputs', () => { + // Invalid volatility + expect(sharpeOptimizedPositionSize(100000, 0.15, 0, 0.02)).toBe(0); + + // Invalid account size + 
expect(sharpeOptimizedPositionSize(0, 0.15, 0.20, 0.02)).toBe(0); + + // Expected return less than risk-free rate + expect(sharpeOptimizedPositionSize(100000, 0.01, 0.20, 0.02)).toBe(0); + }); + + it('should respect maximum leverage', () => { + const result = sharpeOptimizedPositionSize(100000, 0.30, 0.20, 0.02, 2); + + // Kelly fraction would be (0.30 - 0.02) / (0.20)Β² = 7, but capped at 2 + // Position: 100000 * 2 = 200000 + expect(result).toBe(200000); + }); + }); + + describe('validatePositionSize', () => { + it('should validate position size against limits', () => { + const result = validatePositionSize(500, 100, 100000, 10, 2); + + // Position value: 500 * 100 = 50000 (50% of account) + // This exceeds 10% limit + expect(result.isValid).toBe(false); + expect(result.violations).toContain('Position exceeds maximum 10% of account'); + expect(result.adjustedSize).toBe(100); // 10000 / 100 + }); + + it('should pass validation for reasonable position', () => { + const result = validatePositionSize(50, 100, 100000, 10, 2); + + // Position value: 50 * 100 = 5000 (5% of account) + expect(result.isValid).toBe(true); + expect(result.violations).toHaveLength(0); + expect(result.adjustedSize).toBe(50); + }); + + it('should handle fractional shares', () => { + const result = validatePositionSize(0.5, 100, 100000, 10, 2); + + expect(result.isValid).toBe(false); + expect(result.violations).toContain('Position size too small (less than 1 share)'); + expect(result.adjustedSize).toBe(0); + }); + }); +}); diff --git a/libs/utils/test/dateUtils.test.ts b/libs/utils/test/dateUtils.test.ts index c0c8c1c..71a9de0 100644 --- a/libs/utils/test/dateUtils.test.ts +++ b/libs/utils/test/dateUtils.test.ts @@ -1,80 +1,80 @@ -import { describe, it, expect } from 'bun:test'; -import { dateUtils } from '../src/dateUtils'; - -describe('dateUtils', () => { - describe('isTradingDay', () => { - it('should return true for weekdays (Monday-Friday)', () => { - // Monday (June 2, 2025) - 
expect(dateUtils.isTradingDay(new Date(2025, 5, 2))).toBe(true); - // Tuesday (June 3, 2025) - expect(dateUtils.isTradingDay(new Date(2025, 5, 3))).toBe(true); - // Wednesday (June 4, 2025) - expect(dateUtils.isTradingDay(new Date(2025, 5, 4))).toBe(true); - // Thursday (June 5, 2025) - expect(dateUtils.isTradingDay(new Date(2025, 5, 5))).toBe(true); - // Friday (June 6, 2025) - expect(dateUtils.isTradingDay(new Date(2025, 5, 6))).toBe(true); - }); - - it('should return false for weekends (Saturday-Sunday)', () => { - // Saturday (June 7, 2025) - expect(dateUtils.isTradingDay(new Date(2025, 5, 7))).toBe(false); - // Sunday (June 8, 2025) - expect(dateUtils.isTradingDay(new Date(2025, 5, 8))).toBe(false); - }); - }); - - describe('getNextTradingDay', () => { - it('should return the next day when current day is a weekday and next day is a weekday', () => { - // Monday -> Tuesday - const monday = new Date(2025, 5, 2); - const tuesday = new Date(2025, 5, 3); - expect(dateUtils.getNextTradingDay(monday).toDateString()).toBe(tuesday.toDateString()); - }); - - it('should skip weekends when getting next trading day', () => { - // Friday -> Monday - const friday = new Date(2025, 5, 6); - const monday = new Date(2025, 5, 9); - expect(dateUtils.getNextTradingDay(friday).toDateString()).toBe(monday.toDateString()); - }); - - it('should handle weekends as input correctly', () => { - // Saturday -> Monday - const saturday = new Date(2025, 5, 7); - const monday = new Date(2025, 5, 9); - expect(dateUtils.getNextTradingDay(saturday).toDateString()).toBe(monday.toDateString()); - - // Sunday -> Monday - const sunday = new Date(2025, 5, 8); - expect(dateUtils.getNextTradingDay(sunday).toDateString()).toBe(monday.toDateString()); - }); - }); - - describe('getPreviousTradingDay', () => { - it('should return the previous day when current day is a weekday and previous day is a weekday', () => { - // Tuesday -> Monday - const tuesday = new Date(2025, 5, 3); - const monday = new Date(2025, 
5, 2); - expect(dateUtils.getPreviousTradingDay(tuesday).toDateString()).toBe(monday.toDateString()); - }); - - it('should skip weekends when getting previous trading day', () => { - // Monday -> Friday - const monday = new Date(2025, 5, 9); - const friday = new Date(2025, 5, 6); - expect(dateUtils.getPreviousTradingDay(monday).toDateString()).toBe(friday.toDateString()); - }); - - it('should handle weekends as input correctly', () => { - // Saturday -> Friday - const saturday = new Date(2025, 5, 7); - const friday = new Date(2025, 5, 6); - expect(dateUtils.getPreviousTradingDay(saturday).toDateString()).toBe(friday.toDateString()); - - // Sunday -> Friday - const sunday = new Date(2025, 5, 8); - expect(dateUtils.getPreviousTradingDay(sunday).toDateString()).toBe(friday.toDateString()); - }); - }); -}); +import { describe, it, expect } from 'bun:test'; +import { dateUtils } from '../src/dateUtils'; + +describe('dateUtils', () => { + describe('isTradingDay', () => { + it('should return true for weekdays (Monday-Friday)', () => { + // Monday (June 2, 2025) + expect(dateUtils.isTradingDay(new Date(2025, 5, 2))).toBe(true); + // Tuesday (June 3, 2025) + expect(dateUtils.isTradingDay(new Date(2025, 5, 3))).toBe(true); + // Wednesday (June 4, 2025) + expect(dateUtils.isTradingDay(new Date(2025, 5, 4))).toBe(true); + // Thursday (June 5, 2025) + expect(dateUtils.isTradingDay(new Date(2025, 5, 5))).toBe(true); + // Friday (June 6, 2025) + expect(dateUtils.isTradingDay(new Date(2025, 5, 6))).toBe(true); + }); + + it('should return false for weekends (Saturday-Sunday)', () => { + // Saturday (June 7, 2025) + expect(dateUtils.isTradingDay(new Date(2025, 5, 7))).toBe(false); + // Sunday (June 8, 2025) + expect(dateUtils.isTradingDay(new Date(2025, 5, 8))).toBe(false); + }); + }); + + describe('getNextTradingDay', () => { + it('should return the next day when current day is a weekday and next day is a weekday', () => { + // Monday -> Tuesday + const monday = new Date(2025, 5, 
2); + const tuesday = new Date(2025, 5, 3); + expect(dateUtils.getNextTradingDay(monday).toDateString()).toBe(tuesday.toDateString()); + }); + + it('should skip weekends when getting next trading day', () => { + // Friday -> Monday + const friday = new Date(2025, 5, 6); + const monday = new Date(2025, 5, 9); + expect(dateUtils.getNextTradingDay(friday).toDateString()).toBe(monday.toDateString()); + }); + + it('should handle weekends as input correctly', () => { + // Saturday -> Monday + const saturday = new Date(2025, 5, 7); + const monday = new Date(2025, 5, 9); + expect(dateUtils.getNextTradingDay(saturday).toDateString()).toBe(monday.toDateString()); + + // Sunday -> Monday + const sunday = new Date(2025, 5, 8); + expect(dateUtils.getNextTradingDay(sunday).toDateString()).toBe(monday.toDateString()); + }); + }); + + describe('getPreviousTradingDay', () => { + it('should return the previous day when current day is a weekday and previous day is a weekday', () => { + // Tuesday -> Monday + const tuesday = new Date(2025, 5, 3); + const monday = new Date(2025, 5, 2); + expect(dateUtils.getPreviousTradingDay(tuesday).toDateString()).toBe(monday.toDateString()); + }); + + it('should skip weekends when getting previous trading day', () => { + // Monday -> Friday + const monday = new Date(2025, 5, 9); + const friday = new Date(2025, 5, 6); + expect(dateUtils.getPreviousTradingDay(monday).toDateString()).toBe(friday.toDateString()); + }); + + it('should handle weekends as input correctly', () => { + // Saturday -> Friday + const saturday = new Date(2025, 5, 7); + const friday = new Date(2025, 5, 6); + expect(dateUtils.getPreviousTradingDay(saturday).toDateString()).toBe(friday.toDateString()); + + // Sunday -> Friday + const sunday = new Date(2025, 5, 8); + expect(dateUtils.getPreviousTradingDay(sunday).toDateString()).toBe(friday.toDateString()); + }); + }); +}); diff --git a/libs/utils/tsconfig.json b/libs/utils/tsconfig.json index 3030b42..e8f78e0 100644 --- 
a/libs/utils/tsconfig.json +++ b/libs/utils/tsconfig.json @@ -1,13 +1,13 @@ -{ - "extends": "../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "references": [ - { "path": "../types" }, - { "path": "../config" }, - { "path": "../logger" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../config" }, + { "path": "../logger" } + ] +} diff --git a/libs/utils/turbo.json b/libs/utils/turbo.json index 9dbb1ee..9d8964a 100644 --- a/libs/utils/turbo.json +++ b/libs/utils/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/libs/vector-engine/package.json b/libs/vector-engine/package.json index 30d0dc4..fdf422b 100644 --- a/libs/vector-engine/package.json +++ b/libs/vector-engine/package.json @@ -1,34 +1,34 @@ -{ - "name": "@stock-bot/vector-engine", - "version": "1.0.0", - "description": "Vectorized computation engine for high-performance backtesting", - "main": "dist/index.js", - "types": "dist/index.d.ts", - "type": "module", - "scripts": { - "build": "tsc", - "test": "bun test", - "clean": "rimraf dist" - }, - "dependencies": { - "@stock-bot/logger": "*", - "@stock-bot/utils": "*", - "@stock-bot/data-frame": "*" - }, - "devDependencies": { - "@types/node": "^20.11.0", - 
"typescript": "^5.3.0", - "bun-types": "^1.2.15" - }, - "exports": { - ".": { - "import": "./dist/index.js", - "require": "./dist/index.js", - "types": "./dist/index.d.ts" - } - }, - "files": [ - "dist", - "README.md" - ] -} +{ + "name": "@stock-bot/vector-engine", + "version": "1.0.0", + "description": "Vectorized computation engine for high-performance backtesting", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "type": "module", + "scripts": { + "build": "tsc", + "test": "bun test", + "clean": "rimraf dist" + }, + "dependencies": { + "@stock-bot/logger": "*", + "@stock-bot/utils": "*", + "@stock-bot/data-frame": "*" + }, + "devDependencies": { + "@types/node": "^20.11.0", + "typescript": "^5.3.0", + "bun-types": "^1.2.15" + }, + "exports": { + ".": { + "import": "./dist/index.js", + "require": "./dist/index.js", + "types": "./dist/index.d.ts" + } + }, + "files": [ + "dist", + "README.md" + ] +} diff --git a/libs/vector-engine/src/index.ts b/libs/vector-engine/src/index.ts index d44665f..679fb57 100644 --- a/libs/vector-engine/src/index.ts +++ b/libs/vector-engine/src/index.ts @@ -1,393 +1,393 @@ -import { getLogger } from '@stock-bot/logger'; -import { DataFrame } from '@stock-bot/data-frame'; -import { atr, sma, ema, rsi, macd, bollingerBands } from '@stock-bot/utils'; - -// Vector operations interface -export interface VectorOperation { - name: string; - inputs: string[]; - output: string; - operation: (inputs: number[][]) => number[]; -} - -// Vectorized strategy context -export interface VectorizedContext { - data: DataFrame; - lookback: number; - indicators: Record; - signals: Record; -} - -// Performance metrics for vectorized backtesting -export interface VectorizedMetrics { - totalReturns: number; - sharpeRatio: number; - maxDrawdown: number; - winRate: number; - profitFactor: number; - totalTrades: number; - avgTrade: number; - returns: number[]; - drawdown: number[]; - equity: number[]; -} - -// Vectorized backtest result -export interface 
VectorizedBacktestResult { - metrics: VectorizedMetrics; - trades: VectorizedTrade[]; - equity: number[]; - timestamps: number[]; - signals: Record; -} - -export interface VectorizedTrade { - entryIndex: number; - exitIndex: number; - entryPrice: number; - exitPrice: number; - quantity: number; - side: 'LONG' | 'SHORT'; - pnl: number; - return: number; - duration: number; -} - -// Vectorized strategy engine -export class VectorEngine { - private logger = getLogger('vector-engine'); - private operations: Map = new Map(); - - constructor() { - this.registerDefaultOperations(); - } - - private registerDefaultOperations(): void { - // Register common mathematical operations - this.registerOperation({ - name: 'add', - inputs: ['a', 'b'], - output: 'result', - operation: ([a, b]) => a.map((val, i) => val + b[i]) - }); - - this.registerOperation({ - name: 'subtract', - inputs: ['a', 'b'], - output: 'result', - operation: ([a, b]) => a.map((val, i) => val - b[i]) - }); - - this.registerOperation({ - name: 'multiply', - inputs: ['a', 'b'], - output: 'result', - operation: ([a, b]) => a.map((val, i) => val * b[i]) - }); - - this.registerOperation({ - name: 'divide', - inputs: ['a', 'b'], - output: 'result', - operation: ([a, b]) => a.map((val, i) => b[i] !== 0 ? val / b[i] : NaN) - }); - - // Register comparison operations - this.registerOperation({ - name: 'greater_than', - inputs: ['a', 'b'], - output: 'result', - operation: ([a, b]) => a.map((val, i) => val > b[i] ? 1 : 0) - }); - - this.registerOperation({ - name: 'less_than', - inputs: ['a', 'b'], - output: 'result', - operation: ([a, b]) => a.map((val, i) => val < b[i] ? 
1 : 0) - }); - - this.registerOperation({ - name: 'crossover', - inputs: ['a', 'b'], - output: 'result', - operation: ([a, b]) => { - const result = new Array(a.length).fill(0); - for (let i = 1; i < a.length; i++) { - if (a[i] > b[i] && a[i - 1] <= b[i - 1]) { - result[i] = 1; - } - } - return result; - } - }); - - this.registerOperation({ - name: 'crossunder', - inputs: ['a', 'b'], - output: 'result', - operation: ([a, b]) => { - const result = new Array(a.length).fill(0); - for (let i = 1; i < a.length; i++) { - if (a[i] < b[i] && a[i - 1] >= b[i - 1]) { - result[i] = 1; - } - } - return result; - } - }); - } - - registerOperation(operation: VectorOperation): void { - this.operations.set(operation.name, operation); - this.logger.debug(`Registered operation: ${operation.name}`); - } - - // Execute vectorized strategy - async executeVectorizedStrategy( - data: DataFrame, - strategyCode: string - ): Promise { - try { - const context = this.prepareContext(data); - const signals = this.executeStrategy(context, strategyCode); - const trades = this.generateTrades(data, signals); - const metrics = this.calculateMetrics(data, trades); - - return { - metrics, - trades, - equity: metrics.equity, - timestamps: data.getColumn('timestamp'), - signals - }; - } catch (error) { - this.logger.error('Vectorized strategy execution failed', error); - throw error; - } - } - - private prepareContext(data: DataFrame): VectorizedContext { - const close = data.getColumn('close'); - const high = data.getColumn('high'); - const low = data.getColumn('low'); - const volume = data.getColumn('volume'); - - // Calculate common indicators - const indicators: Record = { - sma_20: sma(close, 20), - sma_50: sma(close, 50), - ema_12: ema(close, 12), - ema_26: ema(close, 26), - rsi: rsi(close), - }; - - const m = macd(close); - indicators.macd = m.macd; - indicators.macd_signal = m.signal; - indicators.macd_histogram = m.histogram; - - const bb = bollingerBands(close); - indicators.bb_upper = 
bb.upper; - indicators.bb_middle = bb.middle; - indicators.bb_lower = bb.lower; - - return { - data, - lookback: 100, - indicators, - signals: {} - }; - } - - private executeStrategy(context: VectorizedContext, strategyCode: string): Record { - // This is a simplified strategy execution - // In production, you'd want a more sophisticated strategy compiler/interpreter - const signals: Record = { - buy: new Array(context.data.length).fill(0), - sell: new Array(context.data.length).fill(0) - }; - - // Example: Simple moving average crossover strategy - if (strategyCode.includes('sma_crossover')) { - const sma20 = context.indicators.sma_20; - const sma50 = context.indicators.sma_50; - - for (let i = 1; i < sma20.length; i++) { - // Buy signal: SMA20 crosses above SMA50 - if (!isNaN(sma20[i]) && !isNaN(sma50[i]) && - !isNaN(sma20[i-1]) && !isNaN(sma50[i-1])) { - if (sma20[i] > sma50[i] && sma20[i-1] <= sma50[i-1]) { - signals.buy[i] = 1; - } - // Sell signal: SMA20 crosses below SMA50 - else if (sma20[i] < sma50[i] && sma20[i-1] >= sma50[i-1]) { - signals.sell[i] = 1; - } - } - } - } - - return signals; - } - - private generateTrades(data: DataFrame, signals: Record): VectorizedTrade[] { - const trades: VectorizedTrade[] = []; - const close = data.getColumn('close'); - const timestamps = data.getColumn('timestamp'); - - let position: { index: number; price: number; side: 'LONG' | 'SHORT' } | null = null; - - for (let i = 0; i < close.length; i++) { - if (signals.buy[i] === 1 && !position) { - // Open long position - position = { - index: i, - price: close[i], - side: 'LONG' - }; - } else if (signals.sell[i] === 1) { - if (position && position.side === 'LONG') { - // Close long position - const trade: VectorizedTrade = { - entryIndex: position.index, - exitIndex: i, - entryPrice: position.price, - exitPrice: close[i], - quantity: 1, // Simplified: always trade 1 unit - side: 'LONG', - pnl: close[i] - position.price, - return: (close[i] - position.price) / position.price, 
- duration: timestamps[i] - timestamps[position.index] - }; - trades.push(trade); - position = null; - } else if (!position) { - // Open short position - position = { - index: i, - price: close[i], - side: 'SHORT' - }; - } - } else if (signals.buy[i] === 1 && position && position.side === 'SHORT') { - // Close short position - const trade: VectorizedTrade = { - entryIndex: position.index, - exitIndex: i, - entryPrice: position.price, - exitPrice: close[i], - quantity: 1, - side: 'SHORT', - pnl: position.price - close[i], - return: (position.price - close[i]) / position.price, - duration: timestamps[i] - timestamps[position.index] - }; - trades.push(trade); - position = null; - } - } - - return trades; - } - - private calculateMetrics(data: DataFrame, trades: VectorizedTrade[]): VectorizedMetrics { - if (trades.length === 0) { - return { - totalReturns: 0, - sharpeRatio: 0, - maxDrawdown: 0, - winRate: 0, - profitFactor: 0, - totalTrades: 0, - avgTrade: 0, - returns: [], - drawdown: [], - equity: [] - }; - } - - const returns = trades.map(t => t.return); - const pnls = trades.map(t => t.pnl); - - // Calculate equity curve - const equity: number[] = [10000]; // Starting capital - let currentEquity = 10000; - - for (const trade of trades) { - currentEquity += trade.pnl; - equity.push(currentEquity); - } - - // Calculate drawdown - const drawdown: number[] = []; - let peak = equity[0]; - - for (const eq of equity) { - if (eq > peak) peak = eq; - drawdown.push((peak - eq) / peak); - } - - const totalReturns = (equity[equity.length - 1] - equity[0]) / equity[0]; - const avgReturn = returns.reduce((sum, r) => sum + r, 0) / returns.length; - const returnStd = Math.sqrt( - returns.reduce((sum, r) => sum + Math.pow(r - avgReturn, 2), 0) / returns.length - ); - - const winningTrades = trades.filter(t => t.pnl > 0); - const losingTrades = trades.filter(t => t.pnl < 0); - - const grossProfit = winningTrades.reduce((sum, t) => sum + t.pnl, 0); - const grossLoss = 
Math.abs(losingTrades.reduce((sum, t) => sum + t.pnl, 0)); - - return { - totalReturns, - sharpeRatio: returnStd !== 0 ? (avgReturn / returnStd) * Math.sqrt(252) : 0, - maxDrawdown: Math.max(...drawdown), - winRate: winningTrades.length / trades.length, - profitFactor: grossLoss !== 0 ? grossProfit / grossLoss : Infinity, - totalTrades: trades.length, - avgTrade: pnls.reduce((sum, pnl) => sum + pnl, 0) / trades.length, - returns, - drawdown, - equity - }; - } - - // Utility methods for vectorized operations - applyOperation(operationName: string, inputs: Record): number[] { - const operation = this.operations.get(operationName); - if (!operation) { - throw new Error(`Operation '${operationName}' not found`); - } - - const inputArrays = operation.inputs.map(inputName => { - if (!inputs[inputName]) { - throw new Error(`Input '${inputName}' not provided for operation '${operationName}'`); - } - return inputs[inputName]; - }); - - return operation.operation(inputArrays); - } - - // Batch processing for multiple strategies - async batchBacktest( - data: DataFrame, - strategies: Array<{ id: string; code: string }> - ): Promise> { - const results: Record = {}; - - for (const strategy of strategies) { - try { - this.logger.info(`Running vectorized backtest for strategy: ${strategy.id}`); - results[strategy.id] = await this.executeVectorizedStrategy(data, strategy.code); - } catch (error) { - this.logger.error(`Backtest failed for strategy: ${strategy.id}`, error); - // Continue with other strategies - } - } - - return results; - } +import { getLogger } from '@stock-bot/logger'; +import { DataFrame } from '@stock-bot/data-frame'; +import { atr, sma, ema, rsi, macd, bollingerBands } from '@stock-bot/utils'; + +// Vector operations interface +export interface VectorOperation { + name: string; + inputs: string[]; + output: string; + operation: (inputs: number[][]) => number[]; +} + +// Vectorized strategy context +export interface VectorizedContext { + data: DataFrame; + 
lookback: number; + indicators: Record; + signals: Record; +} + +// Performance metrics for vectorized backtesting +export interface VectorizedMetrics { + totalReturns: number; + sharpeRatio: number; + maxDrawdown: number; + winRate: number; + profitFactor: number; + totalTrades: number; + avgTrade: number; + returns: number[]; + drawdown: number[]; + equity: number[]; +} + +// Vectorized backtest result +export interface VectorizedBacktestResult { + metrics: VectorizedMetrics; + trades: VectorizedTrade[]; + equity: number[]; + timestamps: number[]; + signals: Record; +} + +export interface VectorizedTrade { + entryIndex: number; + exitIndex: number; + entryPrice: number; + exitPrice: number; + quantity: number; + side: 'LONG' | 'SHORT'; + pnl: number; + return: number; + duration: number; +} + +// Vectorized strategy engine +export class VectorEngine { + private logger = getLogger('vector-engine'); + private operations: Map = new Map(); + + constructor() { + this.registerDefaultOperations(); + } + + private registerDefaultOperations(): void { + // Register common mathematical operations + this.registerOperation({ + name: 'add', + inputs: ['a', 'b'], + output: 'result', + operation: ([a, b]) => a.map((val, i) => val + b[i]) + }); + + this.registerOperation({ + name: 'subtract', + inputs: ['a', 'b'], + output: 'result', + operation: ([a, b]) => a.map((val, i) => val - b[i]) + }); + + this.registerOperation({ + name: 'multiply', + inputs: ['a', 'b'], + output: 'result', + operation: ([a, b]) => a.map((val, i) => val * b[i]) + }); + + this.registerOperation({ + name: 'divide', + inputs: ['a', 'b'], + output: 'result', + operation: ([a, b]) => a.map((val, i) => b[i] !== 0 ? val / b[i] : NaN) + }); + + // Register comparison operations + this.registerOperation({ + name: 'greater_than', + inputs: ['a', 'b'], + output: 'result', + operation: ([a, b]) => a.map((val, i) => val > b[i] ? 
1 : 0) + }); + + this.registerOperation({ + name: 'less_than', + inputs: ['a', 'b'], + output: 'result', + operation: ([a, b]) => a.map((val, i) => val < b[i] ? 1 : 0) + }); + + this.registerOperation({ + name: 'crossover', + inputs: ['a', 'b'], + output: 'result', + operation: ([a, b]) => { + const result = new Array(a.length).fill(0); + for (let i = 1; i < a.length; i++) { + if (a[i] > b[i] && a[i - 1] <= b[i - 1]) { + result[i] = 1; + } + } + return result; + } + }); + + this.registerOperation({ + name: 'crossunder', + inputs: ['a', 'b'], + output: 'result', + operation: ([a, b]) => { + const result = new Array(a.length).fill(0); + for (let i = 1; i < a.length; i++) { + if (a[i] < b[i] && a[i - 1] >= b[i - 1]) { + result[i] = 1; + } + } + return result; + } + }); + } + + registerOperation(operation: VectorOperation): void { + this.operations.set(operation.name, operation); + this.logger.debug(`Registered operation: ${operation.name}`); + } + + // Execute vectorized strategy + async executeVectorizedStrategy( + data: DataFrame, + strategyCode: string + ): Promise { + try { + const context = this.prepareContext(data); + const signals = this.executeStrategy(context, strategyCode); + const trades = this.generateTrades(data, signals); + const metrics = this.calculateMetrics(data, trades); + + return { + metrics, + trades, + equity: metrics.equity, + timestamps: data.getColumn('timestamp'), + signals + }; + } catch (error) { + this.logger.error('Vectorized strategy execution failed', error); + throw error; + } + } + + private prepareContext(data: DataFrame): VectorizedContext { + const close = data.getColumn('close'); + const high = data.getColumn('high'); + const low = data.getColumn('low'); + const volume = data.getColumn('volume'); + + // Calculate common indicators + const indicators: Record = { + sma_20: sma(close, 20), + sma_50: sma(close, 50), + ema_12: ema(close, 12), + ema_26: ema(close, 26), + rsi: rsi(close), + }; + + const m = macd(close); + 
indicators.macd = m.macd; + indicators.macd_signal = m.signal; + indicators.macd_histogram = m.histogram; + + const bb = bollingerBands(close); + indicators.bb_upper = bb.upper; + indicators.bb_middle = bb.middle; + indicators.bb_lower = bb.lower; + + return { + data, + lookback: 100, + indicators, + signals: {} + }; + } + + private executeStrategy(context: VectorizedContext, strategyCode: string): Record { + // This is a simplified strategy execution + // In production, you'd want a more sophisticated strategy compiler/interpreter + const signals: Record = { + buy: new Array(context.data.length).fill(0), + sell: new Array(context.data.length).fill(0) + }; + + // Example: Simple moving average crossover strategy + if (strategyCode.includes('sma_crossover')) { + const sma20 = context.indicators.sma_20; + const sma50 = context.indicators.sma_50; + + for (let i = 1; i < sma20.length; i++) { + // Buy signal: SMA20 crosses above SMA50 + if (!isNaN(sma20[i]) && !isNaN(sma50[i]) && + !isNaN(sma20[i-1]) && !isNaN(sma50[i-1])) { + if (sma20[i] > sma50[i] && sma20[i-1] <= sma50[i-1]) { + signals.buy[i] = 1; + } + // Sell signal: SMA20 crosses below SMA50 + else if (sma20[i] < sma50[i] && sma20[i-1] >= sma50[i-1]) { + signals.sell[i] = 1; + } + } + } + } + + return signals; + } + + private generateTrades(data: DataFrame, signals: Record): VectorizedTrade[] { + const trades: VectorizedTrade[] = []; + const close = data.getColumn('close'); + const timestamps = data.getColumn('timestamp'); + + let position: { index: number; price: number; side: 'LONG' | 'SHORT' } | null = null; + + for (let i = 0; i < close.length; i++) { + if (signals.buy[i] === 1 && !position) { + // Open long position + position = { + index: i, + price: close[i], + side: 'LONG' + }; + } else if (signals.sell[i] === 1) { + if (position && position.side === 'LONG') { + // Close long position + const trade: VectorizedTrade = { + entryIndex: position.index, + exitIndex: i, + entryPrice: position.price, + 
exitPrice: close[i], + quantity: 1, // Simplified: always trade 1 unit + side: 'LONG', + pnl: close[i] - position.price, + return: (close[i] - position.price) / position.price, + duration: timestamps[i] - timestamps[position.index] + }; + trades.push(trade); + position = null; + } else if (!position) { + // Open short position + position = { + index: i, + price: close[i], + side: 'SHORT' + }; + } + } else if (signals.buy[i] === 1 && position && position.side === 'SHORT') { + // Close short position + const trade: VectorizedTrade = { + entryIndex: position.index, + exitIndex: i, + entryPrice: position.price, + exitPrice: close[i], + quantity: 1, + side: 'SHORT', + pnl: position.price - close[i], + return: (position.price - close[i]) / position.price, + duration: timestamps[i] - timestamps[position.index] + }; + trades.push(trade); + position = null; + } + } + + return trades; + } + + private calculateMetrics(data: DataFrame, trades: VectorizedTrade[]): VectorizedMetrics { + if (trades.length === 0) { + return { + totalReturns: 0, + sharpeRatio: 0, + maxDrawdown: 0, + winRate: 0, + profitFactor: 0, + totalTrades: 0, + avgTrade: 0, + returns: [], + drawdown: [], + equity: [] + }; + } + + const returns = trades.map(t => t.return); + const pnls = trades.map(t => t.pnl); + + // Calculate equity curve + const equity: number[] = [10000]; // Starting capital + let currentEquity = 10000; + + for (const trade of trades) { + currentEquity += trade.pnl; + equity.push(currentEquity); + } + + // Calculate drawdown + const drawdown: number[] = []; + let peak = equity[0]; + + for (const eq of equity) { + if (eq > peak) peak = eq; + drawdown.push((peak - eq) / peak); + } + + const totalReturns = (equity[equity.length - 1] - equity[0]) / equity[0]; + const avgReturn = returns.reduce((sum, r) => sum + r, 0) / returns.length; + const returnStd = Math.sqrt( + returns.reduce((sum, r) => sum + Math.pow(r - avgReturn, 2), 0) / returns.length + ); + + const winningTrades = trades.filter(t 
=> t.pnl > 0); + const losingTrades = trades.filter(t => t.pnl < 0); + + const grossProfit = winningTrades.reduce((sum, t) => sum + t.pnl, 0); + const grossLoss = Math.abs(losingTrades.reduce((sum, t) => sum + t.pnl, 0)); + + return { + totalReturns, + sharpeRatio: returnStd !== 0 ? (avgReturn / returnStd) * Math.sqrt(252) : 0, + maxDrawdown: Math.max(...drawdown), + winRate: winningTrades.length / trades.length, + profitFactor: grossLoss !== 0 ? grossProfit / grossLoss : Infinity, + totalTrades: trades.length, + avgTrade: pnls.reduce((sum, pnl) => sum + pnl, 0) / trades.length, + returns, + drawdown, + equity + }; + } + + // Utility methods for vectorized operations + applyOperation(operationName: string, inputs: Record): number[] { + const operation = this.operations.get(operationName); + if (!operation) { + throw new Error(`Operation '${operationName}' not found`); + } + + const inputArrays = operation.inputs.map(inputName => { + if (!inputs[inputName]) { + throw new Error(`Input '${inputName}' not provided for operation '${operationName}'`); + } + return inputs[inputName]; + }); + + return operation.operation(inputArrays); + } + + // Batch processing for multiple strategies + async batchBacktest( + data: DataFrame, + strategies: Array<{ id: string; code: string }> + ): Promise> { + const results: Record = {}; + + for (const strategy of strategies) { + try { + this.logger.info(`Running vectorized backtest for strategy: ${strategy.id}`); + results[strategy.id] = await this.executeVectorizedStrategy(data, strategy.code); + } catch (error) { + this.logger.error(`Backtest failed for strategy: ${strategy.id}`, error); + // Continue with other strategies + } + } + + return results; + } } \ No newline at end of file diff --git a/libs/vector-engine/tsconfig.json b/libs/vector-engine/tsconfig.json index e65065b..fb82068 100644 --- a/libs/vector-engine/tsconfig.json +++ b/libs/vector-engine/tsconfig.json @@ -1,14 +1,14 @@ -{ - "extends": "../../tsconfig.json", - 
"compilerOptions": { - "outDir": "./dist", - "rootDir": "./src" - }, - "include": ["src/**/*"], - "references": [ - { "path": "../types" }, - { "path": "../logger" }, - { "path": "../utils" }, - { "path": "../data-frame" } - ] -} +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src" + }, + "include": ["src/**/*"], + "references": [ + { "path": "../types" }, + { "path": "../logger" }, + { "path": "../utils" }, + { "path": "../data-frame" } + ] +} diff --git a/libs/vector-engine/turbo.json b/libs/vector-engine/turbo.json index af1c4ad..721204b 100644 --- a/libs/vector-engine/turbo.json +++ b/libs/vector-engine/turbo.json @@ -1,10 +1,10 @@ -{ - "extends": ["//"], - "tasks": { - "build": { - "dependsOn": ["@stock-bot/types#build", "@stock-bot/utils#build", "@stock-bot/data-frame#build"], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - } - } -} +{ + "extends": ["//"], + "tasks": { + "build": { + "dependsOn": ["@stock-bot/types#build", "@stock-bot/utils#build", "@stock-bot/data-frame#build"], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + } + } +} diff --git a/monitoring/grafana/provisioning/dashboards/json/stock-bot-logs.json b/monitoring/grafana/provisioning/dashboards/json/stock-bot-logs.json index abd376d..51bd5dd 100644 --- a/monitoring/grafana/provisioning/dashboards/json/stock-bot-logs.json +++ b/monitoring/grafana/provisioning/dashboards/json/stock-bot-logs.json @@ -1,211 +1,211 @@ -{ - "annotations": { - "list": [ - { - "builtIn": 1, - "datasource": { - "type": "grafana", - "uid": "-- Grafana --" - }, - "enable": true, - "hide": true, - "iconColor": "rgba(0, 211, 255, 1)", - "name": "Annotations & Alerts", - "type": "dashboard" - } - ] - }, - "editable": true, - 
"fiscalYearStartMonth": 0, - "graphTooltip": 0, - "id": 1, - "links": [], - "liveNow": false, - "panels": [ - { - "datasource": { - "type": "loki", - "uid": "loki" - }, - "description": "Basic log viewer for Stock Bot services", - "gridPos": { - "h": 8, - "w": 24, - "x": 0, - "y": 0 - }, - "id": 1, - "options": { - "dedupStrategy": "none", - "enableLogDetails": true, - "prettifyLogMessage": false, - "showCommonLabels": false, - "showLabels": true, - "showTime": true, - "sortOrder": "Descending", - "wrapLogMessage": false - }, - "targets": [ - { - "datasource": { - "type": "loki", - "uid": "loki" - }, - "editorMode": "builder", - "expr": "{service=~\".+\"}", - "queryType": "range" - } - ], - "title": "All Service Logs", - "type": "logs" - }, - { - "datasource": { - "type": "loki", - "uid": "loki" - }, - "description": "", - "fieldConfig": { - "defaults": { - "color": { - "mode": "palette-classic" - }, - "custom": { - "axisCenteredZero": false, - "axisColorMode": "text", - "axisLabel": "", - "axisPlacement": "auto", - "barAlignment": 0, - "drawStyle": "line", - "fillOpacity": 10, - "gradientMode": "none", - "hideFrom": { - "legend": false, - "tooltip": false, - "viz": false - }, - "insertNulls": false, - "lineInterpolation": "linear", - "lineWidth": 2, - "pointSize": 5, - "scaleDistribution": { - "type": "linear" - }, - "showPoints": "never", - "spanNulls": false, - "stacking": { - "group": "A", - "mode": "none" - }, - "thresholdsStyle": { - "mode": "off" - } - }, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - }, - { - "color": "red", - "value": 80 - } - ] - }, - "unitScale": true - }, - "overrides": [] - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 8 - }, - "id": 2, - "options": { - "legend": { - "calcs": [], - "displayMode": "list", - "placement": "bottom", - "showLegend": true - }, - "tooltip": { - "mode": "single", - "sort": "none" - } - }, - "pluginVersion": "10.2.0", - "targets": [ - { 
- "datasource": { - "type": "loki", - "uid": "loki" - }, - "editorMode": "builder", - "expr": "sum by(service) (count_over_time({level=\"error\"}[5m]))", - "legendFormat": "{{service}}", - "queryType": "range" - } - ], - "title": "Error Count by Service", - "type": "timeseries" - }, - { - "datasource": { - "type": "loki", - "uid": "loki" - }, - "gridPos": { - "h": 8, - "w": 12, - "x": 12, - "y": 8 - }, - "id": 3, - "options": { - "dedupStrategy": "none", - "enableLogDetails": true, - "prettifyLogMessage": false, - "showCommonLabels": false, - "showLabels": false, - "showTime": true, - "sortOrder": "Descending", - "wrapLogMessage": false - }, - "targets": [ - { - "datasource": { - "type": "loki", - "uid": "loki" - }, - "editorMode": "builder", - "expr": "{level=\"error\"}", - "queryType": "range" - } - ], - "title": "Error Logs", - "type": "logs" - } - ], - "refresh": "5s", - "schemaVersion": 38, - "style": "dark", - "tags": [], - "templating": { - "list": [] - }, - "time": { - "from": "now-1h", - "to": "now" - }, - "timepicker": {}, - "timezone": "", - "title": "Stock Bot Logs", - "uid": "stock-bot-logs", - "version": 1, - "weekStart": "" -} +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": 1, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "description": "Basic log viewer for Stock Bot services", + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 0 + }, + "id": 1, + "options": { + "dedupStrategy": "none", + "enableLogDetails": true, + "prettifyLogMessage": false, + "showCommonLabels": false, + "showLabels": true, + "showTime": true, + "sortOrder": "Descending", + "wrapLogMessage": false + }, + "targets": [ + { + 
"datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "builder", + "expr": "{service=~\".+\"}", + "queryType": "range" + } + ], + "title": "All Service Logs", + "type": "logs" + }, + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "description": "", + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 10, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "insertNulls": false, + "lineInterpolation": "linear", + "lineWidth": 2, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "never", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + }, + "unitScale": true + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 8 + }, + "id": 2, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "10.2.0", + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "builder", + "expr": "sum by(service) (count_over_time({level=\"error\"}[5m]))", + "legendFormat": "{{service}}", + "queryType": "range" + } + ], + "title": "Error Count by Service", + "type": "timeseries" + }, + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 8 + }, + "id": 3, + "options": { + "dedupStrategy": "none", + "enableLogDetails": true, + "prettifyLogMessage": false, + "showCommonLabels": false, + 
"showLabels": false, + "showTime": true, + "sortOrder": "Descending", + "wrapLogMessage": false + }, + "targets": [ + { + "datasource": { + "type": "loki", + "uid": "loki" + }, + "editorMode": "builder", + "expr": "{level=\"error\"}", + "queryType": "range" + } + ], + "title": "Error Logs", + "type": "logs" + } + ], + "refresh": "5s", + "schemaVersion": 38, + "style": "dark", + "tags": [], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "Stock Bot Logs", + "uid": "stock-bot-logs", + "version": 1, + "weekStart": "" +} diff --git a/monitoring/grafana/provisioning/dashboards/stockbot.yml b/monitoring/grafana/provisioning/dashboards/stockbot.yml index e2f96a3..4662952 100644 --- a/monitoring/grafana/provisioning/dashboards/stockbot.yml +++ b/monitoring/grafana/provisioning/dashboards/stockbot.yml @@ -1,11 +1,11 @@ -apiVersion: 1 - -providers: - - name: 'Stock Bot Dashboards' - orgId: 1 - folder: 'Stock Bot' - type: file - disableDeletion: false - editable: true - options: - path: /etc/grafana/provisioning/dashboards/json +apiVersion: 1 + +providers: + - name: 'Stock Bot Dashboards' + orgId: 1 + folder: 'Stock Bot' + type: file + disableDeletion: false + editable: true + options: + path: /etc/grafana/provisioning/dashboards/json diff --git a/monitoring/grafana/provisioning/datasources/loki.yml b/monitoring/grafana/provisioning/datasources/loki.yml index 8f87872..0ec0dab 100644 --- a/monitoring/grafana/provisioning/datasources/loki.yml +++ b/monitoring/grafana/provisioning/datasources/loki.yml @@ -1,10 +1,10 @@ -apiVersion: 1 - -datasources: - - name: Loki - type: loki - access: proxy - url: http://loki:3100 - jsonData: - maxLines: 1000 - isDefault: true +apiVersion: 1 + +datasources: + - name: Loki + type: loki + access: proxy + url: http://loki:3100 + jsonData: + maxLines: 1000 + isDefault: true diff --git a/monitoring/grafana/provisioning/datasources/prometheus.yml 
b/monitoring/grafana/provisioning/datasources/prometheus.yml index 8697d5d..7da7ba4 100644 --- a/monitoring/grafana/provisioning/datasources/prometheus.yml +++ b/monitoring/grafana/provisioning/datasources/prometheus.yml @@ -1,7 +1,7 @@ -datasources: - - name: Prometheus - type: prometheus - access: proxy - url: http://prometheus:9090 - isDefault: true +datasources: + - name: Prometheus + type: prometheus + access: proxy + url: http://prometheus:9090 + isDefault: true editable: true \ No newline at end of file diff --git a/monitoring/loki/loki-config.yaml b/monitoring/loki/loki-config.yaml index 482ffd7..824e7fa 100644 --- a/monitoring/loki/loki-config.yaml +++ b/monitoring/loki/loki-config.yaml @@ -1,41 +1,41 @@ -auth_enabled: false - -server: - http_listen_port: 3100 - grpc_listen_port: 9096 - -common: - path_prefix: /loki - storage: - filesystem: - chunks_directory: /loki/chunks - rules_directory: /loki/rules - replication_factor: 1 - ring: - instance_addr: 127.0.0.1 - kvstore: - store: inmemory - -schema_config: - configs: - - from: 2023-01-01 - store: boltdb-shipper - object_store: filesystem - schema: v11 - index: - prefix: index_ - period: 24h - -ruler: - alertmanager_url: http://localhost:9093 - -# 30 days retention -limits_config: - retention_period: 720h - -query_range: - results_cache: - cache: - embedded_cache: - enabled: true - max_size_mb: 100 +auth_enabled: false + +server: + http_listen_port: 3100 + grpc_listen_port: 9096 + +common: + path_prefix: /loki + storage: + filesystem: + chunks_directory: /loki/chunks + rules_directory: /loki/rules + replication_factor: 1 + ring: + instance_addr: 127.0.0.1 + kvstore: + store: inmemory + +schema_config: + configs: + - from: 2023-01-01 + store: boltdb-shipper + object_store: filesystem + schema: v11 + index: + prefix: index_ + period: 24h + +ruler: + alertmanager_url: http://localhost:9093 + +# 30 days retention +limits_config: + retention_period: 720h + +query_range: + results_cache: + cache: + 
embedded_cache: + enabled: true + max_size_mb: 100 diff --git a/monitoring/prometheus.yml b/monitoring/prometheus.yml index 21f6a74..4bb976a 100644 --- a/monitoring/prometheus.yml +++ b/monitoring/prometheus.yml @@ -1,45 +1,45 @@ -global: - scrape_interval: 15s - evaluation_interval: 15s - -rule_files: - # - "first_rules.yml" - # - "second_rules.yml" - -scrape_configs: - # Prometheus itself - - job_name: 'prometheus' - static_configs: - - targets: ['localhost:9090'] - - # Trading Bot Services - - job_name: 'market-data-gateway' - static_configs: - - targets: ['host.docker.internal:3001'] - metrics_path: '/metrics' - scrape_interval: 5s - - - job_name: 'strategy-orchestrator' - static_configs: - - targets: ['host.docker.internal:4001'] - metrics_path: '/metrics' - scrape_interval: 10s - - - job_name: 'risk-guardian' - static_configs: - - targets: ['host.docker.internal:3002'] - metrics_path: '/metrics' - scrape_interval: 10s - - # Infrastructure - - job_name: 'dragonfly' - static_configs: - - targets: ['dragonfly:6379'] - - - job_name: 'postgres' - static_configs: - - targets: ['postgres:5432'] - - - job_name: 'questdb' - static_configs: - - targets: ['questdb:9000'] +global: + scrape_interval: 15s + evaluation_interval: 15s + +rule_files: + # - "first_rules.yml" + # - "second_rules.yml" + +scrape_configs: + # Prometheus itself + - job_name: 'prometheus' + static_configs: + - targets: ['localhost:9090'] + + # Trading Bot Services + - job_name: 'market-data-gateway' + static_configs: + - targets: ['host.docker.internal:3001'] + metrics_path: '/metrics' + scrape_interval: 5s + + - job_name: 'strategy-orchestrator' + static_configs: + - targets: ['host.docker.internal:4001'] + metrics_path: '/metrics' + scrape_interval: 10s + + - job_name: 'risk-guardian' + static_configs: + - targets: ['host.docker.internal:3002'] + metrics_path: '/metrics' + scrape_interval: 10s + + # Infrastructure + - job_name: 'dragonfly' + static_configs: + - targets: ['dragonfly:6379'] + + - 
job_name: 'postgres' + static_configs: + - targets: ['postgres:5432'] + + - job_name: 'questdb' + static_configs: + - targets: ['questdb:9000'] diff --git a/monitoring/prometheus/prometheus.yml b/monitoring/prometheus/prometheus.yml index aec7c80..b2e9929 100644 --- a/monitoring/prometheus/prometheus.yml +++ b/monitoring/prometheus/prometheus.yml @@ -1,26 +1,26 @@ -global: - scrape_interval: 15s - evaluation_interval: 15s - -rule_files: - # - "first_rules.yml" - # - "second_rules.yml" - -scrape_configs: - # The job name is added as a label `job=` to any timeseries scraped from this config. - - job_name: 'prometheus' - static_configs: - - targets: ['localhost:9090'] - - # Add other services as they become available - # - job_name: 'trading-bot' - # static_configs: - # - targets: ['localhost:3001'] - - # - job_name: 'market-data-gateway' - # static_configs: - # - targets: ['localhost:3002'] - - # - job_name: 'risk-guardian' - # static_configs: - # - targets: ['localhost:3003'] +global: + scrape_interval: 15s + evaluation_interval: 15s + +rule_files: + # - "first_rules.yml" + # - "second_rules.yml" + +scrape_configs: + # The job name is added as a label `job=` to any timeseries scraped from this config. 
+ - job_name: 'prometheus' + static_configs: + - targets: ['localhost:9090'] + + # Add other services as they become available + # - job_name: 'trading-bot' + # static_configs: + # - targets: ['localhost:3001'] + + # - job_name: 'market-data-gateway' + # static_configs: + # - targets: ['localhost:3002'] + + # - job_name: 'risk-guardian' + # static_configs: + # - targets: ['localhost:3003'] diff --git a/package.json b/package.json index e9678cb..329147b 100644 --- a/package.json +++ b/package.json @@ -1,75 +1,75 @@ -{ - "name": "stock-bot", - "private": true, - "version": "1.0.0", - "description": "Advanced trading bot with microservice architecture", - "type": "module", - "scripts": { - "dev": "turbo run dev", - "build": "powershell ./scripts/build-all.ps1", - "build:all:clean": "powershell ./scripts/build-all.ps1 -Clean", - "build:all:verbose": "powershell ./scripts/build-all.ps1 -Verbose", - "build:libs": "powershell ./scripts/build-libs.ps1", - "test": "turbo run test", - "test:watch": "bun test --watch", - "test:coverage": "bun test --coverage", - "test:unit": "bun test test/unit", - "test:integration": "bun test test/integration", - "test:e2e": "bun test test/e2e", - "test:libs": "turbo run test --filter='./libs/*'", - "test:apps": "turbo run test --filter=./apps/*/*", - "lint": "turbo run lint", - "start": "turbo run start", - "clean": "turbo run clean", - "clean:cache": "powershell ./scripts/clean.ps1 -cache", - "clean:dist": "powershell ./scripts/clean.ps1 -dist", - "clean:modules": "powershell ./scripts/clean.ps1 -modules", - "clean:all": "powershell ./scripts/clean.ps1 -all", - "clean:all:force": "powershell ./scripts/clean.ps1 -all -force", - "clean:fresh": "powershell ./scripts/clean.ps1 -fresh", - "clean:fresh:force": "powershell ./scripts/clean.ps1 -fresh -force", - "backtest": "turbo run backtest", - "docker:start": "powershell ./scripts/docker.ps1 start", - "docker:stop": "powershell ./scripts/docker.ps1 stop", - "docker:restart": "powershell 
./scripts/docker.ps1 restart", - "docker:status": "powershell ./scripts/docker.ps1 status", - "docker:logs": "powershell ./scripts/docker.ps1 logs", - "docker:reset": "powershell ./scripts/docker.ps1 reset", - "docker:admin": "powershell ./scripts/docker.ps1 admin", - "docker:monitoring": "powershell ./scripts/docker.ps1 monitoring", - "infra:up": "docker-compose up -d dragonfly postgres questdb mongodb", - "infra:down": "docker-compose down", - "infra:reset": "docker-compose down -v && docker-compose up -d dragonfly postgres questdb mongodb", - "dev:full": "npm run infra:up && npm run docker:admin && turbo run dev", - "dev:clean": "npm run infra:reset && npm run dev:full", - "proxy": "bun run ./apps/data-service/src/proxy-demo.ts" - }, - "workspaces": [ - "libs/*", - "apps/*" - ], - "devDependencies": { - "@testcontainers/mongodb": "^10.7.2", - "@testcontainers/postgresql": "^10.7.2", - "@types/bun": "latest", - "@types/node": "^22.15.30", - "@types/supertest": "^6.0.2", - "@types/yup": "^0.32.0", - "bun-types": "^1.2.15", - "mongodb-memory-server": "^9.1.6", - "pg-mem": "^2.8.1", - "supertest": "^6.3.4", - "turbo": "^2.5.4", - "typescript": "^5.8.3", - "yup": "^1.6.1" - }, - "packageManager": "bun@1.1.12", - "engines": { - "node": ">=18.0.0", - "bun": ">=1.1.0" - }, - "dependencies": { - "bullmq": "^5.53.2", - }, - "trustedDependencies": ["mongodb"] -} +{ + "name": "stock-bot", + "private": true, + "version": "1.0.0", + "description": "Advanced trading bot with microservice architecture", + "type": "module", + "scripts": { + "dev": "turbo run dev", + "build": "powershell ./scripts/build-all.ps1", + "build:all:clean": "powershell ./scripts/build-all.ps1 -Clean", + "build:all:verbose": "powershell ./scripts/build-all.ps1 -Verbose", + "build:libs": "powershell ./scripts/build-libs.ps1", + "test": "turbo run test", + "test:watch": "bun test --watch", + "test:coverage": "bun test --coverage", + "test:unit": "bun test test/unit", + "test:integration": "bun test 
test/integration", + "test:e2e": "bun test test/e2e", + "test:libs": "turbo run test --filter='./libs/*'", + "test:apps": "turbo run test --filter=./apps/*/*", + "lint": "turbo run lint", + "start": "turbo run start", + "clean": "turbo run clean", + "clean:cache": "powershell ./scripts/clean.ps1 -cache", + "clean:dist": "powershell ./scripts/clean.ps1 -dist", + "clean:modules": "powershell ./scripts/clean.ps1 -modules", + "clean:all": "powershell ./scripts/clean.ps1 -all", + "clean:all:force": "powershell ./scripts/clean.ps1 -all -force", + "clean:fresh": "powershell ./scripts/clean.ps1 -fresh", + "clean:fresh:force": "powershell ./scripts/clean.ps1 -fresh -force", + "backtest": "turbo run backtest", + "docker:start": "powershell ./scripts/docker.ps1 start", + "docker:stop": "powershell ./scripts/docker.ps1 stop", + "docker:restart": "powershell ./scripts/docker.ps1 restart", + "docker:status": "powershell ./scripts/docker.ps1 status", + "docker:logs": "powershell ./scripts/docker.ps1 logs", + "docker:reset": "powershell ./scripts/docker.ps1 reset", + "docker:admin": "powershell ./scripts/docker.ps1 admin", + "docker:monitoring": "powershell ./scripts/docker.ps1 monitoring", + "infra:up": "docker-compose up -d dragonfly postgres questdb mongodb", + "infra:down": "docker-compose down", + "infra:reset": "docker-compose down -v && docker-compose up -d dragonfly postgres questdb mongodb", + "dev:full": "npm run infra:up && npm run docker:admin && turbo run dev", + "dev:clean": "npm run infra:reset && npm run dev:full", + "proxy": "bun run ./apps/data-service/src/proxy-demo.ts" + }, + "workspaces": [ + "libs/*", + "apps/*" + ], + "devDependencies": { + "@testcontainers/mongodb": "^10.7.2", + "@testcontainers/postgresql": "^10.7.2", + "@types/bun": "latest", + "@types/node": "^22.15.30", + "@types/supertest": "^6.0.2", + "@types/yup": "^0.32.0", + "bun-types": "^1.2.15", + "mongodb-memory-server": "^9.1.6", + "pg-mem": "^2.8.1", + "supertest": "^6.3.4", + "turbo": 
"^2.5.4", + "typescript": "^5.8.3", + "yup": "^1.6.1" + }, + "packageManager": "bun@1.1.12", + "engines": { + "node": ">=18.0.0", + "bun": ">=1.1.0" + }, + "dependencies": { + "bullmq": "^5.53.2", + }, + "trustedDependencies": ["mongodb"] +} diff --git a/scripts/build-all.bat b/scripts/build-all.bat index a558060..0a1f89b 100644 --- a/scripts/build-all.bat +++ b/scripts/build-all.bat @@ -1,85 +1,85 @@ -@echo off -REM Build All Script for Stock Bot (Batch version) -REM Builds libraries first, then apps with Turbo, then dashboard with Angular CLI - -echo πŸš€ Starting complete build process... - -REM Store original directory -set "ORIGINAL_DIR=%CD%" -cd /d "g:\repos\stock-bot" - -REM Step 1: Build libraries first -echo πŸ“š Building libraries... -call powershell ./scripts/build-libs.ps1 -if %ERRORLEVEL% NEQ 0 ( - echo ❌ Library build failed - cd /d "%ORIGINAL_DIR%" - exit /b 1 -) - -REM Step 2: Build apps with Turbo (excluding dashboard) -echo πŸ—οΈ Building applications with Turbo... - -REM Check if each app exists and build individually -if exist "apps\data-service" ( - echo Building data-service... - call turbo run build --filter="./apps/data-service" - if %ERRORLEVEL% NEQ 0 ( - echo ❌ data-service build failed - cd /d "%ORIGINAL_DIR%" - exit /b 1 - ) -) - -if exist "apps\execution-service" ( - echo Building execution-service... - call turbo run build --filter="./apps/execution-service" - if %ERRORLEVEL% NEQ 0 ( - echo ❌ execution-service build failed - cd /d "%ORIGINAL_DIR%" - exit /b 1 - ) -) - -if exist "apps\portfolio-service" ( - echo Building portfolio-service... - call turbo run build --filter="./apps/portfolio-service" - if %ERRORLEVEL% NEQ 0 ( - echo ❌ portfolio-service build failed - cd /d "%ORIGINAL_DIR%" - exit /b 1 - ) -) - -if exist "apps\processing-service" ( - echo Building processing-service... 
- call turbo run build --filter="./apps/processing-service" - if %ERRORLEVEL% NEQ 0 ( - echo ❌ processing-service build failed - cd /d "%ORIGINAL_DIR%" - exit /b 1 - ) -) - -if exist "apps\strategy-service" ( - echo Building strategy-service... - call turbo run build --filter="./apps/strategy-service" - if %ERRORLEVEL% NEQ 0 ( - echo ❌ strategy-service build failed - cd /d "%ORIGINAL_DIR%" - exit /b 1 - ) -) - -REM Step 3: Build dashboard with Angular CLI -echo 🎨 Building Angular dashboard... -cd apps\dashboard -call ng build --configuration production -if %ERRORLEVEL% NEQ 0 ( - echo ❌ Dashboard build failed - cd /d "%ORIGINAL_DIR%" - exit /b 1 -) - -cd /d "%ORIGINAL_DIR%" -echo πŸŽ‰ Complete build finished successfully! +@echo off +REM Build All Script for Stock Bot (Batch version) +REM Builds libraries first, then apps with Turbo, then dashboard with Angular CLI + +echo πŸš€ Starting complete build process... + +REM Store original directory +set "ORIGINAL_DIR=%CD%" +cd /d "g:\repos\stock-bot" + +REM Step 1: Build libraries first +echo πŸ“š Building libraries... +call powershell ./scripts/build-libs.ps1 +if %ERRORLEVEL% NEQ 0 ( + echo ❌ Library build failed + cd /d "%ORIGINAL_DIR%" + exit /b 1 +) + +REM Step 2: Build apps with Turbo (excluding dashboard) +echo πŸ—οΈ Building applications with Turbo... + +REM Check if each app exists and build individually +if exist "apps\data-service" ( + echo Building data-service... + call turbo run build --filter="./apps/data-service" + if %ERRORLEVEL% NEQ 0 ( + echo ❌ data-service build failed + cd /d "%ORIGINAL_DIR%" + exit /b 1 + ) +) + +if exist "apps\execution-service" ( + echo Building execution-service... + call turbo run build --filter="./apps/execution-service" + if %ERRORLEVEL% NEQ 0 ( + echo ❌ execution-service build failed + cd /d "%ORIGINAL_DIR%" + exit /b 1 + ) +) + +if exist "apps\portfolio-service" ( + echo Building portfolio-service... 
+ call turbo run build --filter="./apps/portfolio-service" + if %ERRORLEVEL% NEQ 0 ( + echo ❌ portfolio-service build failed + cd /d "%ORIGINAL_DIR%" + exit /b 1 + ) +) + +if exist "apps\processing-service" ( + echo Building processing-service... + call turbo run build --filter="./apps/processing-service" + if %ERRORLEVEL% NEQ 0 ( + echo ❌ processing-service build failed + cd /d "%ORIGINAL_DIR%" + exit /b 1 + ) +) + +if exist "apps\strategy-service" ( + echo Building strategy-service... + call turbo run build --filter="./apps/strategy-service" + if %ERRORLEVEL% NEQ 0 ( + echo ❌ strategy-service build failed + cd /d "%ORIGINAL_DIR%" + exit /b 1 + ) +) + +REM Step 3: Build dashboard with Angular CLI +echo 🎨 Building Angular dashboard... +cd apps\dashboard +call ng build --configuration production +if %ERRORLEVEL% NEQ 0 ( + echo ❌ Dashboard build failed + cd /d "%ORIGINAL_DIR%" + exit /b 1 +) + +cd /d "%ORIGINAL_DIR%" +echo πŸŽ‰ Complete build finished successfully! diff --git a/scripts/build-all.ps1 b/scripts/build-all.ps1 index 7b2837f..92ff6ab 100644 --- a/scripts/build-all.ps1 +++ b/scripts/build-all.ps1 @@ -1,108 +1,108 @@ -# Build All Script for Stock Bot -# Builds libraries first, then apps with Turbo, then dashboard with Angular CLI - -param( - [switch]$Clean, - [switch]$Verbose -) - -$ErrorActionPreference = "Stop" - -Write-Host "πŸš€ Starting complete build process..." -ForegroundColor Cyan - -# Store original location -$originalLocation = Get-Location -Set-Location "g:\repos\stock-bot" - -try { - # Step 1: Clean if requested - if ($Clean) { - Write-Host "🧹 Cleaning previous builds..." -ForegroundColor Yellow - & powershell ./scripts/clean.ps1 -dist - if ($LASTEXITCODE -ne 0) { - throw "Clean failed" - } - } - - # Step 2: Build libraries first - Write-Host "πŸ“š Building libraries..." 
-ForegroundColor Green - & powershell ./scripts/build-libs.ps1 - if ($LASTEXITCODE -ne 0) { - throw "Library build failed" - } # Step 3: Build apps with Turbo (excluding dashboard) - Write-Host "πŸ—οΈ Building applications with Turbo..." -ForegroundColor Green - - # Get list of apps excluding dashboard - $appDirs = Get-ChildItem -Path "apps" -Directory | Where-Object { $_.Name -ne "dashboard" } - - if ($appDirs.Count -gt 0) { - # Build each app individually to avoid filter syntax issues - foreach ($app in $appDirs) { - $appPath = "./apps/$($app.Name)" - Write-Host " Building $($app.Name)..." -ForegroundColor Cyan - - $turboCmd = "turbo run build --filter=$appPath" - - if ($Verbose) { - Write-Host " Running: $turboCmd" -ForegroundColor DarkGray - } - - Invoke-Expression $turboCmd - if ($LASTEXITCODE -ne 0) { - throw "Failed to build app: $($app.Name)" - } - } - - Write-Host "βœ… Apps built successfully: $($appDirs.Name -join ', ')" -ForegroundColor Green - } else { - Write-Host "ℹ️ No non-dashboard apps found to build with Turbo" -ForegroundColor Yellow - } - - # Step 4: Build dashboard with Angular CLI - $dashboardPath = "apps/dashboard" - if (Test-Path $dashboardPath) { - Write-Host "🎨 Building Angular dashboard..." -ForegroundColor Green - - Set-Location $dashboardPath - - # Check if ng is available - try { - ng version | Out-Null - } catch { - Write-Host "❌ Angular CLI not found. Installing..." -ForegroundColor Red - npm install -g @angular/cli - if ($LASTEXITCODE -ne 0) { - throw "Failed to install Angular CLI" - } - } - - # Build dashboard - ng build --configuration production - if ($LASTEXITCODE -ne 0) { - throw "Dashboard build failed" - } - - Write-Host "βœ… Dashboard built successfully" -ForegroundColor Green - Set-Location $originalLocation - } else { - Write-Host "⚠️ Dashboard not found at $dashboardPath" -ForegroundColor Yellow - } - - Write-Host "πŸŽ‰ Complete build finished successfully!" 
-ForegroundColor Green - Write-Host "" - Write-Host "Build Summary:" -ForegroundColor Cyan - Write-Host " βœ… Libraries built" -ForegroundColor Green - if ($appFilters.Count -gt 0) { - Write-Host " βœ… Apps built: $($appDirs.Name -join ', ')" -ForegroundColor Green - } - if (Test-Path "apps/dashboard") { - Write-Host " βœ… Dashboard built" -ForegroundColor Green - } - -} catch { - Write-Host "❌ Build failed: $($_.Exception.Message)" -ForegroundColor Red - Set-Location $originalLocation - exit 1 -} finally { - Set-Location $originalLocation -} +# Build All Script for Stock Bot +# Builds libraries first, then apps with Turbo, then dashboard with Angular CLI + +param( + [switch]$Clean, + [switch]$Verbose +) + +$ErrorActionPreference = "Stop" + +Write-Host "πŸš€ Starting complete build process..." -ForegroundColor Cyan + +# Store original location +$originalLocation = Get-Location +Set-Location "g:\repos\stock-bot" + +try { + # Step 1: Clean if requested + if ($Clean) { + Write-Host "🧹 Cleaning previous builds..." -ForegroundColor Yellow + & powershell ./scripts/clean.ps1 -dist + if ($LASTEXITCODE -ne 0) { + throw "Clean failed" + } + } + + # Step 2: Build libraries first + Write-Host "πŸ“š Building libraries..." -ForegroundColor Green + & powershell ./scripts/build-libs.ps1 + if ($LASTEXITCODE -ne 0) { + throw "Library build failed" + } # Step 3: Build apps with Turbo (excluding dashboard) + Write-Host "πŸ—οΈ Building applications with Turbo..." -ForegroundColor Green + + # Get list of apps excluding dashboard + $appDirs = Get-ChildItem -Path "apps" -Directory | Where-Object { $_.Name -ne "dashboard" } + + if ($appDirs.Count -gt 0) { + # Build each app individually to avoid filter syntax issues + foreach ($app in $appDirs) { + $appPath = "./apps/$($app.Name)" + Write-Host " Building $($app.Name)..." 
-ForegroundColor Cyan + + $turboCmd = "turbo run build --filter=$appPath" + + if ($Verbose) { + Write-Host " Running: $turboCmd" -ForegroundColor DarkGray + } + + Invoke-Expression $turboCmd + if ($LASTEXITCODE -ne 0) { + throw "Failed to build app: $($app.Name)" + } + } + + Write-Host "βœ… Apps built successfully: $($appDirs.Name -join ', ')" -ForegroundColor Green + } else { + Write-Host "ℹ️ No non-dashboard apps found to build with Turbo" -ForegroundColor Yellow + } + + # Step 4: Build dashboard with Angular CLI + $dashboardPath = "apps/dashboard" + if (Test-Path $dashboardPath) { + Write-Host "🎨 Building Angular dashboard..." -ForegroundColor Green + + Set-Location $dashboardPath + + # Check if ng is available + try { + ng version | Out-Null + } catch { + Write-Host "❌ Angular CLI not found. Installing..." -ForegroundColor Red + npm install -g @angular/cli + if ($LASTEXITCODE -ne 0) { + throw "Failed to install Angular CLI" + } + } + + # Build dashboard + ng build --configuration production + if ($LASTEXITCODE -ne 0) { + throw "Dashboard build failed" + } + + Write-Host "βœ… Dashboard built successfully" -ForegroundColor Green + Set-Location $originalLocation + } else { + Write-Host "⚠️ Dashboard not found at $dashboardPath" -ForegroundColor Yellow + } + + Write-Host "πŸŽ‰ Complete build finished successfully!" 
-ForegroundColor Green + Write-Host "" + Write-Host "Build Summary:" -ForegroundColor Cyan + Write-Host " βœ… Libraries built" -ForegroundColor Green + if ($appFilters.Count -gt 0) { + Write-Host " βœ… Apps built: $($appDirs.Name -join ', ')" -ForegroundColor Green + } + if (Test-Path "apps/dashboard") { + Write-Host " βœ… Dashboard built" -ForegroundColor Green + } + +} catch { + Write-Host "❌ Build failed: $($_.Exception.Message)" -ForegroundColor Red + Set-Location $originalLocation + exit 1 +} finally { + Set-Location $originalLocation +} diff --git a/scripts/build-clean.ps1 b/scripts/build-clean.ps1 index 6ece35a..41d231b 100644 --- a/scripts/build-clean.ps1 +++ b/scripts/build-clean.ps1 @@ -1,64 +1,64 @@ -param( - [switch]$force -) - -Write-Host "=== Clean Build Process ===" -ForegroundColor Green - -# Step 1: Clean everything -Write-Host "Step 1: Cleaning build artifacts..." -ForegroundColor Yellow -& ".\scripts\clean.ps1" -dist -force - -# Step 2: Install dependencies -Write-Host "Step 2: Installing dependencies..." -ForegroundColor Yellow -bun install - -# Step 3: Build libraries in dependency order -Write-Host "Step 3: Building libraries..." -ForegroundColor Yellow - -$libraries = @( - "types", - "config", - "logger", - "utils", - "postgres-client", - "mongodb-client", - "questdb-client", - "cache", - "http", - "event-bus", - "shutdown", - "data-frame", - "vector-engine", - "strategy-engine", - "data-adjustments" -) - -foreach ($lib in $libraries) { - $libPath = "libs\$lib" - if (Test-Path $libPath) { - Write-Host "Building $lib..." -ForegroundColor Blue - Set-Location $libPath - bun run build - if ($LASTEXITCODE -ne 0) { - Write-Host "Failed to build $lib" -ForegroundColor Red - Set-Location "..\..\" - exit 1 - } - Set-Location "..\..\" - Write-Host "βœ“ $lib built successfully" -ForegroundColor Green - } else { - Write-Host "⚠ Library $lib not found, skipping..." 
-ForegroundColor Yellow - } -} - -# Step 4: Build applications -Write-Host "Step 4: Building applications..." -ForegroundColor Yellow -turbo run build --filter='./apps/*' - -if ($LASTEXITCODE -ne 0) { - Write-Host "Failed to build applications" -ForegroundColor Red - exit 1 -} - -Write-Host "=== Clean Build Complete! ===" -ForegroundColor Green -Write-Host "All packages built successfully" -ForegroundColor Blue +param( + [switch]$force +) + +Write-Host "=== Clean Build Process ===" -ForegroundColor Green + +# Step 1: Clean everything +Write-Host "Step 1: Cleaning build artifacts..." -ForegroundColor Yellow +& ".\scripts\clean.ps1" -dist -force + +# Step 2: Install dependencies +Write-Host "Step 2: Installing dependencies..." -ForegroundColor Yellow +bun install + +# Step 3: Build libraries in dependency order +Write-Host "Step 3: Building libraries..." -ForegroundColor Yellow + +$libraries = @( + "types", + "config", + "logger", + "utils", + "postgres-client", + "mongodb-client", + "questdb-client", + "cache", + "http", + "event-bus", + "shutdown", + "data-frame", + "vector-engine", + "strategy-engine", + "data-adjustments" +) + +foreach ($lib in $libraries) { + $libPath = "libs\$lib" + if (Test-Path $libPath) { + Write-Host "Building $lib..." -ForegroundColor Blue + Set-Location $libPath + bun run build + if ($LASTEXITCODE -ne 0) { + Write-Host "Failed to build $lib" -ForegroundColor Red + Set-Location "..\..\" + exit 1 + } + Set-Location "..\..\" + Write-Host "βœ“ $lib built successfully" -ForegroundColor Green + } else { + Write-Host "⚠ Library $lib not found, skipping..." -ForegroundColor Yellow + } +} + +# Step 4: Build applications +Write-Host "Step 4: Building applications..." -ForegroundColor Yellow +turbo run build --filter='./apps/*' + +if ($LASTEXITCODE -ne 0) { + Write-Host "Failed to build applications" -ForegroundColor Red + exit 1 +} + +Write-Host "=== Clean Build Complete! 
===" -ForegroundColor Green +Write-Host "All packages built successfully" -ForegroundColor Blue diff --git a/scripts/build-libs.ps1 b/scripts/build-libs.ps1 index 28b0f2b..bd4dab8 100644 --- a/scripts/build-libs.ps1 +++ b/scripts/build-libs.ps1 @@ -1,43 +1,43 @@ -# Build and install the new libraries - -Write-Host "Building and installing new libraries..." -ForegroundColor Cyan - -# Build order is important due to dependencies -$libs = @( - "types", # Base types - no dependencies - "config", # Configuration - depends on types - "logger", # Logging utilities - depends on types - "utils", # Utilities - depends on types and config - # Database clients - "postgres-client", # PostgreSQL client - depends on types, config, logger - # "mongodb-client", # MongoDB client - depends on types, config, logger (temporarily disabled - needs zod->yup conversion) - "questdb-client", # QuestDB client - depends on types, config, logger - - # Service libraries - "cache", # Cache - depends on types and logger - "http", # HTTP client - depends on types, config, logger - "event-bus", # Event bus - depends on types, logger - "shutdown", # Shutdown - depends on types, logger - - # Engine libraries - "data-frame", # Data frame - depends on types, utils - "vector-engine", # Vector engine - depends on types, utils, data-frame - "strategy-engine" # Strategy engine - depends on types, utils, event-bus -) - -# Build each library in order -foreach ($lib in $libs) { - $libPath = "g:\repos\stock-bot\libs\$lib" - - Write-Host "Building $lib..." -ForegroundColor Green - Set-Location $libPath - bun run build - - if ($LASTEXITCODE -ne 0) { - Write-Host "Failed to build $lib. Exiting." -ForegroundColor Red - exit 1 - } -} - -Write-Host "All libraries built successfully!" -ForegroundColor Green -Set-Location g:\repos\stock-bot +# Build and install the new libraries + +Write-Host "Building and installing new libraries..." 
-ForegroundColor Cyan + +# Build order is important due to dependencies +$libs = @( + "types", # Base types - no dependencies + "config", # Configuration - depends on types + "logger", # Logging utilities - depends on types + "utils", # Utilities - depends on types and config + # Database clients + "postgres-client", # PostgreSQL client - depends on types, config, logger + # "mongodb-client", # MongoDB client - depends on types, config, logger (temporarily disabled - needs zod->yup conversion) + "questdb-client", # QuestDB client - depends on types, config, logger + + # Service libraries + "cache", # Cache - depends on types and logger + "http", # HTTP client - depends on types, config, logger + "event-bus", # Event bus - depends on types, logger + "shutdown", # Shutdown - depends on types, logger + + # Engine libraries + "data-frame", # Data frame - depends on types, utils + "vector-engine", # Vector engine - depends on types, utils, data-frame + "strategy-engine" # Strategy engine - depends on types, utils, event-bus +) + +# Build each library in order +foreach ($lib in $libs) { + $libPath = "g:\repos\stock-bot\libs\$lib" + + Write-Host "Building $lib..." -ForegroundColor Green + Set-Location $libPath + bun run build + + if ($LASTEXITCODE -ne 0) { + Write-Host "Failed to build $lib. Exiting." -ForegroundColor Red + exit 1 + } +} + +Write-Host "All libraries built successfully!" -ForegroundColor Green +Set-Location g:\repos\stock-bot diff --git a/scripts/clean.ps1 b/scripts/clean.ps1 index 351321a..708ee55 100644 --- a/scripts/clean.ps1 +++ b/scripts/clean.ps1 @@ -1,182 +1,182 @@ -param( - [switch]$modules, - [switch]$dist, - [switch]$cache, - [switch]$tsbuildinfo, - [switch]$all, - [switch]$fresh, - [switch]$force -) - -function Remove-DirectoriesByName { - param([string]$Name, [string]$Description) - - Write-Host "Removing $Description..." -ForegroundColor Blue - $directories = Get-ChildItem -Path . 
-Name $Name -Recurse -Directory -ErrorAction SilentlyContinue - - if ($directories.Count -gt 0) { - Write-Host "Found $($directories.Count) $Description to remove" -ForegroundColor Gray - $directories | ForEach-Object { - Remove-Item $_ -Recurse -Force -ErrorAction SilentlyContinue - Write-Host " Removed: $_" -ForegroundColor Gray - } - } else { - Write-Host "No $Description found" -ForegroundColor Gray - } -} - -function Remove-FilesByPattern { - param([string]$Pattern, [string]$Description) - - Write-Host "Removing $Description..." -ForegroundColor Blue - $files = Get-ChildItem -Path . -Name $Pattern -Recurse -File -ErrorAction SilentlyContinue - - if ($files.Count -gt 0) { - Write-Host "Found $($files.Count) $Description to remove" -ForegroundColor Gray - $files | ForEach-Object { - Remove-Item $_ -Force -ErrorAction SilentlyContinue - Write-Host " Removed: $_" -ForegroundColor Gray - } - } else { - Write-Host "No $Description found" -ForegroundColor Gray - } -} - -Write-Host "Starting cleanup..." -ForegroundColor Yellow - -if ($all -or $fresh) { - if (-not $force) { - Write-Host "WARNING: This will remove ALL build artifacts, caches, dependencies, and temporary files!" -ForegroundColor Red - Write-Host "This includes: node_modules, dist, all caches, logs, databases, and lock files" -ForegroundColor Yellow - $confirmation = Read-Host "Are you sure you want to continue? (y/N)" - if ($confirmation -ne 'y' -and $confirmation -ne 'Y') { - Write-Host "Operation cancelled." 
-ForegroundColor Yellow - exit 0 - } - } - - Write-Host "=== NUCLEAR CLEAN: Removing EVERYTHING ===" -ForegroundColor Red - - # Dependencies and packages - Remove-DirectoriesByName "node_modules" "node_modules directories" - - # Build outputs - Remove-DirectoriesByName "dist" "dist directories" - Remove-DirectoriesByName "build" "build directories" - Remove-DirectoriesByName "lib" "lib directories" - Remove-DirectoriesByName "out" "out directories" - Remove-DirectoriesByName ".out" "build output directories" - - # Cache directories - Remove-DirectoriesByName ".turbo" "Turborepo cache directories" - Remove-DirectoriesByName ".next" "Next.js cache directories" - Remove-DirectoriesByName ".parcel-cache" "Parcel cache directories" - Remove-DirectoriesByName ".angular" "Angular CLI cache directories" - Remove-DirectoriesByName ".nuxt" "Nuxt.js cache directories" - Remove-DirectoriesByName ".vite" "Vite cache directories" - Remove-DirectoriesByName ".webpack" "Webpack cache directories" - Remove-DirectoriesByName ".rollup.cache" "Rollup cache directories" - - # Test and coverage - Remove-DirectoriesByName "coverage" "test coverage directories" - Remove-DirectoriesByName ".nyc_output" "NYC coverage directories" - Remove-DirectoriesByName ".jest" "Jest cache directories" - Remove-DirectoriesByName ".vitest" "Vitest cache directories" - - # Storybook - Remove-DirectoriesByName ".storybook-out" "Storybook build directories" - Remove-DirectoriesByName "storybook-static" "Storybook static directories" - - # Temporary and log directories - Remove-DirectoriesByName "tmp" "temporary directories" - Remove-DirectoriesByName "temp" "temp directories" - Remove-DirectoriesByName ".tmp" "hidden temp directories" - Remove-DirectoriesByName "logs" "log directories" - Remove-DirectoriesByName ".logs" "hidden log directories" - - # Project specific (from .gitignore) - Remove-DirectoriesByName ".data" "data directories" - Remove-DirectoriesByName ".backtest-results" "backtest result 
directories" - Remove-DirectoriesByName ".old" "old backup directories" - Remove-DirectoriesByName ".mongo" "MongoDB data directories" - Remove-DirectoriesByName ".chat" "chat directories" - - Write-Host "Removing lock files..." -ForegroundColor Blue - Remove-Item -Path "bun.lockb" -Force -ErrorAction SilentlyContinue - Remove-Item -Path "package-lock.json" -Force -ErrorAction SilentlyContinue - Remove-Item -Path "yarn.lock" -Force -ErrorAction SilentlyContinue - Remove-Item -Path "pnpm-lock.yaml" -Force -ErrorAction SilentlyContinue - Get-ChildItem -Path . -Name "bun.lockb" -Recurse -File | Remove-Item -Force -ErrorAction SilentlyContinue - Get-ChildItem -Path . -Name "package-lock.json" -Recurse -File | Remove-Item -Force -ErrorAction SilentlyContinue - Get-ChildItem -Path . -Name "yarn.lock" -Recurse -File | Remove-Item -Force -ErrorAction SilentlyContinue - Get-ChildItem -Path . -Name "pnpm-lock.yaml" -Recurse -File | Remove-Item -Force -ErrorAction SilentlyContinue - - # TypeScript and build files - Remove-FilesByPattern "*.tsbuildinfo" "TypeScript build info files" - Remove-FilesByPattern ".eslintcache" "ESLint cache files" - Remove-FilesByPattern ".stylelintcache" "Stylelint cache files" - Remove-FilesByPattern ".prettiercache" "Prettier cache files" - Remove-FilesByPattern "*.d.ts" "TypeScript build info files" - - # Database files - Remove-FilesByPattern "*.db" "database files" - Remove-FilesByPattern "*.sqlite" "SQLite database files" - Remove-FilesByPattern "*.sqlite3" "SQLite3 database files" - - # Log files - Remove-FilesByPattern "*.log" "log files" - Remove-FilesByPattern "npm-debug.log*" "npm debug logs" - Remove-FilesByPattern "yarn-debug.log*" "yarn debug logs" - Remove-FilesByPattern "yarn-error.log*" "yarn error logs" - Remove-FilesByPattern "lerna-debug.log*" "lerna debug logs" - - # OS generated files - Remove-FilesByPattern ".DS_Store" "macOS .DS_Store files" - Remove-FilesByPattern "Thumbs.db" "Windows thumbnail files" - 
Remove-FilesByPattern "ehthumbs.db" "Windows thumbnail cache files" - Remove-FilesByPattern "Desktop.ini" "Windows desktop files" - - Write-Host "=== NUCLEAR CLEAN COMPLETE ===" -ForegroundColor Red - Write-Host "Cleanup complete - no need for turbo clean" -ForegroundColor Blue -} -elseif ($modules) { - Remove-DirectoriesByName "node_modules" "node_modules directories" - - Write-Host "Removing lock files..." -ForegroundColor Blue - Remove-Item -Path "bun.lockb" -Force -ErrorAction SilentlyContinue - Get-ChildItem -Path . -Name "bun.lockb" -Recurse -File | Remove-Item -Force -ErrorAction SilentlyContinue -} -elseif ($dist) { - Remove-DirectoriesByName "dist" "dist directories" - Remove-DirectoriesByName ".turbo" "Turborepo cache directories" - Remove-DirectoriesByName ".next" "Next.js cache directories" - Remove-DirectoriesByName ".parcel-cache" "Parcel cache directories" - Remove-DirectoriesByName ".angular" "Angular CLI cache directories" - - Remove-FilesByPattern "*.tsbuildinfo" "TypeScript build info files" - Remove-FilesByPattern ".eslintcache" "ESLint cache files" -} -elseif ($cache) { - Remove-DirectoriesByName ".turbo" "Turborepo cache directories" - Remove-DirectoriesByName ".next" "Next.js cache directories" - Remove-DirectoriesByName ".parcel-cache" "Parcel cache directories" - Remove-DirectoriesByName ".angular" "Angular CLI cache directories" - Remove-DirectoriesByName "coverage" "test coverage directories" - Remove-DirectoriesByName ".nyc_output" "NYC coverage directories" - Remove-FilesByPattern ".eslintcache" "ESLint cache files" -} -elseif ($tsbuildinfo) { - Remove-FilesByPattern "*.tsbuildinfo" "TypeScript build info files" -} -else { - Write-Host "Running turbo clean..." -ForegroundColor Blue - # Only run turbo clean for the default case - turbo run clean -} - -if ($fresh) { - Write-Host "Installing dependencies..." 
-ForegroundColor Green - bun install -} - +param( + [switch]$modules, + [switch]$dist, + [switch]$cache, + [switch]$tsbuildinfo, + [switch]$all, + [switch]$fresh, + [switch]$force +) + +function Remove-DirectoriesByName { + param([string]$Name, [string]$Description) + + Write-Host "Removing $Description..." -ForegroundColor Blue + $directories = Get-ChildItem -Path . -Name $Name -Recurse -Directory -ErrorAction SilentlyContinue + + if ($directories.Count -gt 0) { + Write-Host "Found $($directories.Count) $Description to remove" -ForegroundColor Gray + $directories | ForEach-Object { + Remove-Item $_ -Recurse -Force -ErrorAction SilentlyContinue + Write-Host " Removed: $_" -ForegroundColor Gray + } + } else { + Write-Host "No $Description found" -ForegroundColor Gray + } +} + +function Remove-FilesByPattern { + param([string]$Pattern, [string]$Description) + + Write-Host "Removing $Description..." -ForegroundColor Blue + $files = Get-ChildItem -Path . -Name $Pattern -Recurse -File -ErrorAction SilentlyContinue + + if ($files.Count -gt 0) { + Write-Host "Found $($files.Count) $Description to remove" -ForegroundColor Gray + $files | ForEach-Object { + Remove-Item $_ -Force -ErrorAction SilentlyContinue + Write-Host " Removed: $_" -ForegroundColor Gray + } + } else { + Write-Host "No $Description found" -ForegroundColor Gray + } +} + +Write-Host "Starting cleanup..." -ForegroundColor Yellow + +if ($all -or $fresh) { + if (-not $force) { + Write-Host "WARNING: This will remove ALL build artifacts, caches, dependencies, and temporary files!" -ForegroundColor Red + Write-Host "This includes: node_modules, dist, all caches, logs, databases, and lock files" -ForegroundColor Yellow + $confirmation = Read-Host "Are you sure you want to continue? (y/N)" + if ($confirmation -ne 'y' -and $confirmation -ne 'Y') { + Write-Host "Operation cancelled." 
-ForegroundColor Yellow + exit 0 + } + } + + Write-Host "=== NUCLEAR CLEAN: Removing EVERYTHING ===" -ForegroundColor Red + + # Dependencies and packages + Remove-DirectoriesByName "node_modules" "node_modules directories" + + # Build outputs + Remove-DirectoriesByName "dist" "dist directories" + Remove-DirectoriesByName "build" "build directories" + Remove-DirectoriesByName "lib" "lib directories" + Remove-DirectoriesByName "out" "out directories" + Remove-DirectoriesByName ".out" "build output directories" + + # Cache directories + Remove-DirectoriesByName ".turbo" "Turborepo cache directories" + Remove-DirectoriesByName ".next" "Next.js cache directories" + Remove-DirectoriesByName ".parcel-cache" "Parcel cache directories" + Remove-DirectoriesByName ".angular" "Angular CLI cache directories" + Remove-DirectoriesByName ".nuxt" "Nuxt.js cache directories" + Remove-DirectoriesByName ".vite" "Vite cache directories" + Remove-DirectoriesByName ".webpack" "Webpack cache directories" + Remove-DirectoriesByName ".rollup.cache" "Rollup cache directories" + + # Test and coverage + Remove-DirectoriesByName "coverage" "test coverage directories" + Remove-DirectoriesByName ".nyc_output" "NYC coverage directories" + Remove-DirectoriesByName ".jest" "Jest cache directories" + Remove-DirectoriesByName ".vitest" "Vitest cache directories" + + # Storybook + Remove-DirectoriesByName ".storybook-out" "Storybook build directories" + Remove-DirectoriesByName "storybook-static" "Storybook static directories" + + # Temporary and log directories + Remove-DirectoriesByName "tmp" "temporary directories" + Remove-DirectoriesByName "temp" "temp directories" + Remove-DirectoriesByName ".tmp" "hidden temp directories" + Remove-DirectoriesByName "logs" "log directories" + Remove-DirectoriesByName ".logs" "hidden log directories" + + # Project specific (from .gitignore) + Remove-DirectoriesByName ".data" "data directories" + Remove-DirectoriesByName ".backtest-results" "backtest result 
directories" + Remove-DirectoriesByName ".old" "old backup directories" + Remove-DirectoriesByName ".mongo" "MongoDB data directories" + Remove-DirectoriesByName ".chat" "chat directories" + + Write-Host "Removing lock files..." -ForegroundColor Blue + Remove-Item -Path "bun.lockb" -Force -ErrorAction SilentlyContinue + Remove-Item -Path "package-lock.json" -Force -ErrorAction SilentlyContinue + Remove-Item -Path "yarn.lock" -Force -ErrorAction SilentlyContinue + Remove-Item -Path "pnpm-lock.yaml" -Force -ErrorAction SilentlyContinue + Get-ChildItem -Path . -Name "bun.lockb" -Recurse -File | Remove-Item -Force -ErrorAction SilentlyContinue + Get-ChildItem -Path . -Name "package-lock.json" -Recurse -File | Remove-Item -Force -ErrorAction SilentlyContinue + Get-ChildItem -Path . -Name "yarn.lock" -Recurse -File | Remove-Item -Force -ErrorAction SilentlyContinue + Get-ChildItem -Path . -Name "pnpm-lock.yaml" -Recurse -File | Remove-Item -Force -ErrorAction SilentlyContinue + + # TypeScript and build files + Remove-FilesByPattern "*.tsbuildinfo" "TypeScript build info files" + Remove-FilesByPattern ".eslintcache" "ESLint cache files" + Remove-FilesByPattern ".stylelintcache" "Stylelint cache files" + Remove-FilesByPattern ".prettiercache" "Prettier cache files" + Remove-FilesByPattern "*.d.ts" "TypeScript build info files" + + # Database files + Remove-FilesByPattern "*.db" "database files" + Remove-FilesByPattern "*.sqlite" "SQLite database files" + Remove-FilesByPattern "*.sqlite3" "SQLite3 database files" + + # Log files + Remove-FilesByPattern "*.log" "log files" + Remove-FilesByPattern "npm-debug.log*" "npm debug logs" + Remove-FilesByPattern "yarn-debug.log*" "yarn debug logs" + Remove-FilesByPattern "yarn-error.log*" "yarn error logs" + Remove-FilesByPattern "lerna-debug.log*" "lerna debug logs" + + # OS generated files + Remove-FilesByPattern ".DS_Store" "macOS .DS_Store files" + Remove-FilesByPattern "Thumbs.db" "Windows thumbnail files" + 
Remove-FilesByPattern "ehthumbs.db" "Windows thumbnail cache files" + Remove-FilesByPattern "Desktop.ini" "Windows desktop files" + + Write-Host "=== NUCLEAR CLEAN COMPLETE ===" -ForegroundColor Red + Write-Host "Cleanup complete - no need for turbo clean" -ForegroundColor Blue +} +elseif ($modules) { + Remove-DirectoriesByName "node_modules" "node_modules directories" + + Write-Host "Removing lock files..." -ForegroundColor Blue + Remove-Item -Path "bun.lockb" -Force -ErrorAction SilentlyContinue + Get-ChildItem -Path . -Name "bun.lockb" -Recurse -File | Remove-Item -Force -ErrorAction SilentlyContinue +} +elseif ($dist) { + Remove-DirectoriesByName "dist" "dist directories" + Remove-DirectoriesByName ".turbo" "Turborepo cache directories" + Remove-DirectoriesByName ".next" "Next.js cache directories" + Remove-DirectoriesByName ".parcel-cache" "Parcel cache directories" + Remove-DirectoriesByName ".angular" "Angular CLI cache directories" + + Remove-FilesByPattern "*.tsbuildinfo" "TypeScript build info files" + Remove-FilesByPattern ".eslintcache" "ESLint cache files" +} +elseif ($cache) { + Remove-DirectoriesByName ".turbo" "Turborepo cache directories" + Remove-DirectoriesByName ".next" "Next.js cache directories" + Remove-DirectoriesByName ".parcel-cache" "Parcel cache directories" + Remove-DirectoriesByName ".angular" "Angular CLI cache directories" + Remove-DirectoriesByName "coverage" "test coverage directories" + Remove-DirectoriesByName ".nyc_output" "NYC coverage directories" + Remove-FilesByPattern ".eslintcache" "ESLint cache files" +} +elseif ($tsbuildinfo) { + Remove-FilesByPattern "*.tsbuildinfo" "TypeScript build info files" +} +else { + Write-Host "Running turbo clean..." -ForegroundColor Blue + # Only run turbo clean for the default case + turbo run clean +} + +if ($fresh) { + Write-Host "Installing dependencies..." -ForegroundColor Green + bun install +} + Write-Host "Cleanup complete!" 
-ForegroundColor Green \ No newline at end of file diff --git a/scripts/docker.ps1 b/scripts/docker.ps1 index c9f4c44..51915a9 100644 --- a/scripts/docker.ps1 +++ b/scripts/docker.ps1 @@ -1,142 +1,142 @@ -#!/usr/bin/env pwsh - -# Trading Bot Docker Management Script - -param( - [Parameter(Mandatory=$true)] - [ValidateSet("start", "stop", "restart", "status", "logs", "reset", "admin", "monitoring", "help")] - [string]$Action, - - [Parameter(Mandatory=$false)] - [string]$Service = "", - - [Parameter(Mandatory=$false)] - [switch]$Dev = $false -) - -$ComposeFiles = if ($Dev) { - "-f docker-compose.yml -f docker-compose.dev.yml" -} else { - "-f docker-compose.yml" -} - -switch ($Action) { - "start" { - Write-Host "πŸš€ Starting Trading Bot infrastructure..." -ForegroundColor Green - if ($Service) { - Invoke-Expression "docker-compose $ComposeFiles up -d $Service" - } else { - Invoke-Expression "docker-compose $ComposeFiles up -d dragonfly postgres questdb" - } - Write-Host "βœ… Infrastructure started!" -ForegroundColor Green - Write-Host "" - Write-Host "πŸ”— Access Points:" -ForegroundColor Cyan - Write-Host " Dragonfly: localhost:6379" - Write-Host " PostgreSQL: localhost:5432" - Write-Host " QuestDB Console: http://localhost:9000" - Write-Host "" - Write-Host "πŸ’‘ Use './scripts/docker.ps1 admin' to start admin interfaces" - } - - "stop" { - Write-Host "πŸ›‘ Stopping Trading Bot infrastructure..." -ForegroundColor Yellow - if ($Service) { - Invoke-Expression "docker-compose $ComposeFiles stop $Service" - } else { - Invoke-Expression "docker-compose $ComposeFiles down" - } - Write-Host "βœ… Infrastructure stopped!" -ForegroundColor Green - } - - "restart" { - Write-Host "πŸ”„ Restarting Trading Bot infrastructure..." -ForegroundColor Yellow - if ($Service) { - Invoke-Expression "docker-compose $ComposeFiles restart $Service" - } else { - Invoke-Expression "docker-compose $ComposeFiles restart" - } - Write-Host "βœ… Infrastructure restarted!" 
-ForegroundColor Green - } - - "status" { - Write-Host "πŸ“Š Trading Bot Infrastructure Status:" -ForegroundColor Cyan - Invoke-Expression "docker-compose $ComposeFiles ps" - Write-Host "" - Write-Host "πŸ” Health Checks:" -ForegroundColor Cyan - docker ps --filter "name=trading-bot" --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}" - } - - "logs" { - if ($Service) { - Write-Host "πŸ“‹ Logs for ${Service}:" -ForegroundColor Cyan - Invoke-Expression "docker-compose $ComposeFiles logs -f $Service" - } else { - Write-Host "πŸ“‹ All service logs:" -ForegroundColor Cyan - Invoke-Expression "docker-compose $ComposeFiles logs -f" - } - } - - "reset" { - Write-Host "⚠️ Resetting Trading Bot infrastructure (will delete all data)..." -ForegroundColor Red - $confirm = Read-Host "Are you sure? Type 'yes' to confirm" - if ($confirm -eq "yes") { - Invoke-Expression "docker-compose $ComposeFiles down -v" - Write-Host "πŸ—‘οΈ Volumes removed" - Invoke-Expression "docker-compose $ComposeFiles up -d dragonfly postgres questdb" - Write-Host "βœ… Infrastructure reset complete!" -ForegroundColor Green - } else { - Write-Host "❌ Reset cancelled" -ForegroundColor Yellow - } - } - - "admin" { - Write-Host "πŸ”§ Starting admin interfaces..." -ForegroundColor Green - Invoke-Expression "docker-compose $ComposeFiles up -d redis-insight pgadmin" - Write-Host "βœ… Admin interfaces started!" -ForegroundColor Green - Write-Host "" - Write-Host "πŸ”— Admin Access:" -ForegroundColor Cyan - Write-Host " Redis Insight: http://localhost:8001" - Write-Host " PgAdmin: http://localhost:8080" - Write-Host " Email: admin@tradingbot.local" - Write-Host " Password: admin123" - } "monitoring" { - Write-Host "πŸ“Š Starting monitoring stack..." -ForegroundColor Green - Invoke-Expression "docker-compose $ComposeFiles up -d prometheus grafana loki" - Write-Host "βœ… Monitoring started!" 
-ForegroundColor Green - Write-Host "" - Write-Host "πŸ”— Monitoring Access:" -ForegroundColor Cyan - Write-Host " Prometheus: http://localhost:9090" - Write-Host " Grafana: http://localhost:3000" - Write-Host " Username: admin" - Write-Host " Password: admin" - Write-Host " Loki: http://localhost:3100" - } - - "help" { - Write-Host "" - Write-Host "πŸ€– Trading Bot Docker Management" -ForegroundColor Green - Write-Host "" - Write-Host "Usage: ./scripts/docker.ps1 [options]" -ForegroundColor Cyan - Write-Host "" - Write-Host "Actions:" -ForegroundColor Yellow - Write-Host " start Start infrastructure services" - Write-Host " stop Stop infrastructure services" - Write-Host " restart Restart infrastructure services" - Write-Host " status Show service status" - Write-Host " logs Show service logs" - Write-Host " reset Reset all data (destructive)" - Write-Host " admin Start admin interfaces" - Write-Host " monitoring Start monitoring stack" - Write-Host " help Show this help" - Write-Host "" - Write-Host "Options:" -ForegroundColor Yellow - Write-Host " -Service Specify a specific service" - Write-Host " -Dev Use development configuration" - Write-Host "" - Write-Host "Examples:" -ForegroundColor Cyan - Write-Host " ./scripts/docker.ps1 start" - Write-Host " ./scripts/docker.ps1 start -Dev" - Write-Host " ./scripts/docker.ps1 logs -Service dragonfly" - Write-Host " ./scripts/docker.ps1 admin" - } -} +#!/usr/bin/env pwsh + +# Trading Bot Docker Management Script + +param( + [Parameter(Mandatory=$true)] + [ValidateSet("start", "stop", "restart", "status", "logs", "reset", "admin", "monitoring", "help")] + [string]$Action, + + [Parameter(Mandatory=$false)] + [string]$Service = "", + + [Parameter(Mandatory=$false)] + [switch]$Dev = $false +) + +$ComposeFiles = if ($Dev) { + "-f docker-compose.yml -f docker-compose.dev.yml" +} else { + "-f docker-compose.yml" +} + +switch ($Action) { + "start" { + Write-Host "πŸš€ Starting Trading Bot infrastructure..." 
-ForegroundColor Green + if ($Service) { + Invoke-Expression "docker-compose $ComposeFiles up -d $Service" + } else { + Invoke-Expression "docker-compose $ComposeFiles up -d dragonfly postgres questdb" + } + Write-Host "βœ… Infrastructure started!" -ForegroundColor Green + Write-Host "" + Write-Host "πŸ”— Access Points:" -ForegroundColor Cyan + Write-Host " Dragonfly: localhost:6379" + Write-Host " PostgreSQL: localhost:5432" + Write-Host " QuestDB Console: http://localhost:9000" + Write-Host "" + Write-Host "πŸ’‘ Use './scripts/docker.ps1 admin' to start admin interfaces" + } + + "stop" { + Write-Host "πŸ›‘ Stopping Trading Bot infrastructure..." -ForegroundColor Yellow + if ($Service) { + Invoke-Expression "docker-compose $ComposeFiles stop $Service" + } else { + Invoke-Expression "docker-compose $ComposeFiles down" + } + Write-Host "βœ… Infrastructure stopped!" -ForegroundColor Green + } + + "restart" { + Write-Host "πŸ”„ Restarting Trading Bot infrastructure..." -ForegroundColor Yellow + if ($Service) { + Invoke-Expression "docker-compose $ComposeFiles restart $Service" + } else { + Invoke-Expression "docker-compose $ComposeFiles restart" + } + Write-Host "βœ… Infrastructure restarted!" -ForegroundColor Green + } + + "status" { + Write-Host "πŸ“Š Trading Bot Infrastructure Status:" -ForegroundColor Cyan + Invoke-Expression "docker-compose $ComposeFiles ps" + Write-Host "" + Write-Host "πŸ” Health Checks:" -ForegroundColor Cyan + docker ps --filter "name=trading-bot" --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}" + } + + "logs" { + if ($Service) { + Write-Host "πŸ“‹ Logs for ${Service}:" -ForegroundColor Cyan + Invoke-Expression "docker-compose $ComposeFiles logs -f $Service" + } else { + Write-Host "πŸ“‹ All service logs:" -ForegroundColor Cyan + Invoke-Expression "docker-compose $ComposeFiles logs -f" + } + } + + "reset" { + Write-Host "⚠️ Resetting Trading Bot infrastructure (will delete all data)..." 
-ForegroundColor Red + $confirm = Read-Host "Are you sure? Type 'yes' to confirm" + if ($confirm -eq "yes") { + Invoke-Expression "docker-compose $ComposeFiles down -v" + Write-Host "πŸ—‘οΈ Volumes removed" + Invoke-Expression "docker-compose $ComposeFiles up -d dragonfly postgres questdb" + Write-Host "βœ… Infrastructure reset complete!" -ForegroundColor Green + } else { + Write-Host "❌ Reset cancelled" -ForegroundColor Yellow + } + } + + "admin" { + Write-Host "πŸ”§ Starting admin interfaces..." -ForegroundColor Green + Invoke-Expression "docker-compose $ComposeFiles up -d redis-insight pgadmin" + Write-Host "βœ… Admin interfaces started!" -ForegroundColor Green + Write-Host "" + Write-Host "πŸ”— Admin Access:" -ForegroundColor Cyan + Write-Host " Redis Insight: http://localhost:8001" + Write-Host " PgAdmin: http://localhost:8080" + Write-Host " Email: admin@tradingbot.local" + Write-Host " Password: admin123" + } "monitoring" { + Write-Host "πŸ“Š Starting monitoring stack..." -ForegroundColor Green + Invoke-Expression "docker-compose $ComposeFiles up -d prometheus grafana loki" + Write-Host "βœ… Monitoring started!" 
-ForegroundColor Green + Write-Host "" + Write-Host "πŸ”— Monitoring Access:" -ForegroundColor Cyan + Write-Host " Prometheus: http://localhost:9090" + Write-Host " Grafana: http://localhost:3000" + Write-Host " Username: admin" + Write-Host " Password: admin" + Write-Host " Loki: http://localhost:3100" + } + + "help" { + Write-Host "" + Write-Host "πŸ€– Trading Bot Docker Management" -ForegroundColor Green + Write-Host "" + Write-Host "Usage: ./scripts/docker.ps1 [options]" -ForegroundColor Cyan + Write-Host "" + Write-Host "Actions:" -ForegroundColor Yellow + Write-Host " start Start infrastructure services" + Write-Host " stop Stop infrastructure services" + Write-Host " restart Restart infrastructure services" + Write-Host " status Show service status" + Write-Host " logs Show service logs" + Write-Host " reset Reset all data (destructive)" + Write-Host " admin Start admin interfaces" + Write-Host " monitoring Start monitoring stack" + Write-Host " help Show this help" + Write-Host "" + Write-Host "Options:" -ForegroundColor Yellow + Write-Host " -Service Specify a specific service" + Write-Host " -Dev Use development configuration" + Write-Host "" + Write-Host "Examples:" -ForegroundColor Cyan + Write-Host " ./scripts/docker.ps1 start" + Write-Host " ./scripts/docker.ps1 start -Dev" + Write-Host " ./scripts/docker.ps1 logs -Service dragonfly" + Write-Host " ./scripts/docker.ps1 admin" + } +} diff --git a/scripts/verify-build.ps1 b/scripts/verify-build.ps1 index 20e2fad..a2b6a23 100644 --- a/scripts/verify-build.ps1 +++ b/scripts/verify-build.ps1 @@ -1,94 +1,94 @@ -param( - [switch]$verbose -) - -Write-Host "=== Verifying Build Health ===" -ForegroundColor Green - -$errors = @() -$warnings = @() - -# Check for common build issues -Write-Host "Checking for common build issues..." -ForegroundColor Yellow - -# Check for mismatched .d.ts files in source directories -Write-Host "Checking for .d.ts files in source directories..." 
-ForegroundColor Blue -$sourceDtsFiles = Get-ChildItem -Path ".\libs\*\src\**\*.d.ts" -Recurse -ErrorAction SilentlyContinue -if ($sourceDtsFiles.Count -gt 0) { - $warnings += "Found .d.ts files in source directories:" - foreach ($file in $sourceDtsFiles) { - $warnings += " - $($file.FullName)" - } -} - -# Check for missing dist directories after build -Write-Host "Checking for missing dist directories..." -ForegroundColor Blue -$libraries = @("types", "config", "logger", "utils", "cache", "http") -foreach ($lib in $libraries) { - $distPath = "libs\$lib\dist" - if (-not (Test-Path $distPath)) { - $errors += "Missing dist directory for $lib" - } else { - $indexFile = "$distPath\index.js" - if (-not (Test-Path $indexFile)) { - $errors += "Missing index.js in $lib dist directory" - } - } -} - -# Check for stale tsbuildinfo files -Write-Host "Checking for stale tsbuildinfo files..." -ForegroundColor Blue -$tsbuildFiles = Get-ChildItem -Path ".\**\*.tsbuildinfo" -Recurse -ErrorAction SilentlyContinue -if ($tsbuildFiles.Count -gt 0) { - $warnings += "Found stale .tsbuildinfo files:" - foreach ($file in $tsbuildFiles) { - $warnings += " - $($file.FullName)" - } -} - -# Check package.json dependencies -Write-Host "Checking package.json files..." 
-ForegroundColor Blue -$packageFiles = Get-ChildItem -Path ".\**\package.json" -Recurse -ErrorAction SilentlyContinue -foreach ($packageFile in $packageFiles) { - try { - $packageContent = Get-Content $packageFile.FullName | ConvertFrom-Json - if (-not $packageContent.name) { - $errors += "Package.json missing name: $($packageFile.FullName)" - } - if (-not $packageContent.version) { - $warnings += "Package.json missing version: $($packageFile.FullName)" - } - } catch { - $errors += "Invalid package.json: $($packageFile.FullName)" - } -} - -# Report results -Write-Host "`n=== Build Health Report ===" -ForegroundColor Green - -if ($errors.Count -gt 0) { - Write-Host "❌ ERRORS FOUND:" -ForegroundColor Red - foreach ($error in $errors) { - Write-Host " $error" -ForegroundColor Red - } -} - -if ($warnings.Count -gt 0) { - Write-Host "⚠ WARNINGS:" -ForegroundColor Yellow - foreach ($warning in $warnings) { - Write-Host " $warning" -ForegroundColor Yellow - } -} - -if ($errors.Count -eq 0 -and $warnings.Count -eq 0) { - Write-Host "βœ… No issues found - build environment is healthy!" -ForegroundColor Green -} elseif ($errors.Count -eq 0) { - Write-Host "βœ… No critical errors found (only warnings)" -ForegroundColor Green -} else { - Write-Host "❌ Critical errors found - build may fail" -ForegroundColor Red - exit 1 -} - -Write-Host "`nRecommended commands:" -ForegroundColor Cyan -Write-Host " Clean build: bun run reset" -ForegroundColor Gray -Write-Host " Quick build: bun run build" -ForegroundColor Gray -Write-Host " Clean only: bun run clean:dist" -ForegroundColor Gray +param( + [switch]$verbose +) + +Write-Host "=== Verifying Build Health ===" -ForegroundColor Green + +$errors = @() +$warnings = @() + +# Check for common build issues +Write-Host "Checking for common build issues..." -ForegroundColor Yellow + +# Check for mismatched .d.ts files in source directories +Write-Host "Checking for .d.ts files in source directories..." 
-ForegroundColor Blue +$sourceDtsFiles = Get-ChildItem -Path ".\libs\*\src\**\*.d.ts" -Recurse -ErrorAction SilentlyContinue +if ($sourceDtsFiles.Count -gt 0) { + $warnings += "Found .d.ts files in source directories:" + foreach ($file in $sourceDtsFiles) { + $warnings += " - $($file.FullName)" + } +} + +# Check for missing dist directories after build +Write-Host "Checking for missing dist directories..." -ForegroundColor Blue +$libraries = @("types", "config", "logger", "utils", "cache", "http") +foreach ($lib in $libraries) { + $distPath = "libs\$lib\dist" + if (-not (Test-Path $distPath)) { + $errors += "Missing dist directory for $lib" + } else { + $indexFile = "$distPath\index.js" + if (-not (Test-Path $indexFile)) { + $errors += "Missing index.js in $lib dist directory" + } + } +} + +# Check for stale tsbuildinfo files +Write-Host "Checking for stale tsbuildinfo files..." -ForegroundColor Blue +$tsbuildFiles = Get-ChildItem -Path ".\**\*.tsbuildinfo" -Recurse -ErrorAction SilentlyContinue +if ($tsbuildFiles.Count -gt 0) { + $warnings += "Found stale .tsbuildinfo files:" + foreach ($file in $tsbuildFiles) { + $warnings += " - $($file.FullName)" + } +} + +# Check package.json dependencies +Write-Host "Checking package.json files..." 
-ForegroundColor Blue +$packageFiles = Get-ChildItem -Path ".\**\package.json" -Recurse -ErrorAction SilentlyContinue +foreach ($packageFile in $packageFiles) { + try { + $packageContent = Get-Content $packageFile.FullName | ConvertFrom-Json + if (-not $packageContent.name) { + $errors += "Package.json missing name: $($packageFile.FullName)" + } + if (-not $packageContent.version) { + $warnings += "Package.json missing version: $($packageFile.FullName)" + } + } catch { + $errors += "Invalid package.json: $($packageFile.FullName)" + } +} + +# Report results +Write-Host "`n=== Build Health Report ===" -ForegroundColor Green + +if ($errors.Count -gt 0) { + Write-Host "❌ ERRORS FOUND:" -ForegroundColor Red + foreach ($error in $errors) { + Write-Host " $error" -ForegroundColor Red + } +} + +if ($warnings.Count -gt 0) { + Write-Host "⚠ WARNINGS:" -ForegroundColor Yellow + foreach ($warning in $warnings) { + Write-Host " $warning" -ForegroundColor Yellow + } +} + +if ($errors.Count -eq 0 -and $warnings.Count -eq 0) { + Write-Host "βœ… No issues found - build environment is healthy!" -ForegroundColor Green +} elseif ($errors.Count -eq 0) { + Write-Host "βœ… No critical errors found (only warnings)" -ForegroundColor Green +} else { + Write-Host "❌ Critical errors found - build may fail" -ForegroundColor Red + exit 1 +} + +Write-Host "`nRecommended commands:" -ForegroundColor Cyan +Write-Host " Clean build: bun run reset" -ForegroundColor Gray +Write-Host " Quick build: bun run build" -ForegroundColor Gray +Write-Host " Clean only: bun run clean:dist" -ForegroundColor Gray diff --git a/test/integration/setup.js b/test/integration/setup.js index 6495b25..a565d5f 100644 --- a/test/integration/setup.js +++ b/test/integration/setup.js @@ -1,183 +1,183 @@ -/** - * Integration Test Setup - * - * Sets up test containers and real database instances for integration testing. - * This file is executed before integration tests run. 
- */ -import { GenericContainer } from 'testcontainers'; -import { MongoMemoryServer } from 'mongodb-memory-server'; -let questdbContainer; -let postgresContainer; -let mongoContainer; -let mongoMemoryServer; -/** - * Global setup for integration tests - * Starts real database containers for testing - */ -beforeAll(async () => { - console.log('πŸš€ Starting integration test containers...'); - try { - // Start QuestDB container - console.log('πŸ“Š Starting QuestDB container...'); - questdbContainer = await new GenericContainer('questdb/questdb:7.3.10') - .withExposedPorts(9000, 8812, 9009) - .withEnvironment({ - 'QDB_TELEMETRY_ENABLED': 'false', - 'QDB_LOG_LEVEL': 'ERROR' - }) - .withStartupTimeout(60000) - .start(); - // Start PostgreSQL container - console.log('🐘 Starting PostgreSQL container...'); - postgresContainer = await new GenericContainer('postgres:15-alpine') - .withExposedPorts(5432) - .withEnvironment({ - 'POSTGRES_DB': 'trading_bot_test', - 'POSTGRES_USER': 'trading_admin', - 'POSTGRES_PASSWORD': 'trading_pass_test' - }) - .withStartupTimeout(60000) - .start(); - // Start MongoDB container - console.log('πŸƒ Starting MongoDB container...'); - mongoContainer = await new GenericContainer('mongo:7-jammy') - .withExposedPorts(27017) - .withEnvironment({ - 'MONGO_INITDB_ROOT_USERNAME': 'trading_admin', - 'MONGO_INITDB_ROOT_PASSWORD': 'trading_mongo_test', - 'MONGO_INITDB_DATABASE': 'trading_bot_test' - }) - .withStartupTimeout(60000) - .start(); - // Update environment variables for tests - process.env.QUESTDB_HOST = questdbContainer.getHost(); - process.env.QUESTDB_HTTP_PORT = questdbContainer.getMappedPort(9000).toString(); - process.env.QUESTDB_PG_PORT = questdbContainer.getMappedPort(8812).toString(); - process.env.QUESTDB_INFLUX_PORT = questdbContainer.getMappedPort(9009).toString(); - process.env.POSTGRES_HOST = postgresContainer.getHost(); - process.env.POSTGRES_PORT = postgresContainer.getMappedPort(5432).toString(); - process.env.MONGODB_HOST = 
mongoContainer.getHost(); - process.env.MONGODB_PORT = mongoContainer.getMappedPort(27017).toString(); - console.log('βœ… All containers started successfully!'); - console.log(`πŸ“Š QuestDB: http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}`); - console.log(`🐘 PostgreSQL: ${process.env.POSTGRES_HOST}:${process.env.POSTGRES_PORT}`); - console.log(`πŸƒ MongoDB: ${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}`); - } - catch (error) { - console.error('❌ Failed to start test containers:', error); - // Try to use MongoDB Memory Server as fallback - console.log('πŸ”„ Falling back to MongoDB Memory Server...'); - try { - mongoMemoryServer = await MongoMemoryServer.create({ - instance: { - dbName: 'trading_bot_test' - } - }); - const mongoUri = mongoMemoryServer.getUri(); - const mongoUrl = new URL(mongoUri); - process.env.MONGODB_HOST = mongoUrl.hostname; - process.env.MONGODB_PORT = mongoUrl.port; - process.env.MONGODB_URI = mongoUri; - console.log('βœ… MongoDB Memory Server started as fallback'); - } - catch (fallbackError) { - console.error('❌ Failed to start MongoDB Memory Server:', fallbackError); - throw fallbackError; - } - // For other databases, use localhost defaults if containers fail - if (!questdbContainer) { - console.log('⚠️ Using localhost QuestDB (ensure it\'s running)'); - process.env.QUESTDB_HOST = 'localhost'; - process.env.QUESTDB_HTTP_PORT = '9000'; - process.env.QUESTDB_PG_PORT = '8812'; - process.env.QUESTDB_INFLUX_PORT = '9009'; - } - if (!postgresContainer) { - console.log('⚠️ Using localhost PostgreSQL (ensure it\'s running)'); - process.env.POSTGRES_HOST = 'localhost'; - process.env.POSTGRES_PORT = '5432'; - } - } -}, 120000); // 2 minutes timeout for container startup -/** - * Global cleanup for integration tests - * Stops all test containers - */ -afterAll(async () => { - console.log('🧹 Cleaning up integration test containers...'); - const cleanup = async (container, name) => { - if (container) { - try { - await 
container.stop(); - console.log(`βœ… ${name} container stopped`); - } - catch (error) { - console.warn(`⚠️ Failed to stop ${name} container:`, error); - } - } - }; - await Promise.all([ - cleanup(questdbContainer, 'QuestDB'), - cleanup(postgresContainer, 'PostgreSQL'), - cleanup(mongoContainer, 'MongoDB') - ]); - if (mongoMemoryServer) { - try { - await mongoMemoryServer.stop(); - console.log('βœ… MongoDB Memory Server stopped'); - } - catch (error) { - console.warn('⚠️ Failed to stop MongoDB Memory Server:', error); - } - } - console.log('πŸŽ‰ Integration test cleanup complete!'); -}, 30000); -/** - * Wait for database services to be ready - */ -export const waitForServices = async (timeout = 30000) => { - const start = Date.now(); - while (Date.now() - start < timeout) { - try { - // Check if QuestDB HTTP interface is ready - const questdbUrl = `http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}/status`; - const response = await fetch(questdbUrl); - if (response.ok) { - console.log('βœ… QuestDB is ready'); - return; - } - } - catch (error) { - // Service not ready yet, continue waiting - } - await new Promise(resolve => setTimeout(resolve, 1000)); - } - throw new Error('Services did not become ready within timeout'); -}; -/** - * Test utilities for integration tests - */ -export const integrationTestHelpers = { - /** - * Get QuestDB HTTP URL - */ - getQuestDBUrl: () => `http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}`, - /** - * Get PostgreSQL connection string - */ - getPostgresUrl: () => `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@${process.env.POSTGRES_HOST}:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}`, - /** - * Get MongoDB connection string - */ - getMongoUrl: () => { - if (process.env.MONGODB_URI) { - return process.env.MONGODB_URI; - } - return 
`mongodb://${process.env.MONGODB_USERNAME}:${process.env.MONGODB_PASSWORD}@${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}/${process.env.MONGODB_DATABASE}`; - }, - /** - * Wait for services to be ready - */ - waitForServices -}; +/** + * Integration Test Setup + * + * Sets up test containers and real database instances for integration testing. + * This file is executed before integration tests run. + */ +import { GenericContainer } from 'testcontainers'; +import { MongoMemoryServer } from 'mongodb-memory-server'; +let questdbContainer; +let postgresContainer; +let mongoContainer; +let mongoMemoryServer; +/** + * Global setup for integration tests + * Starts real database containers for testing + */ +beforeAll(async () => { + console.log('πŸš€ Starting integration test containers...'); + try { + // Start QuestDB container + console.log('πŸ“Š Starting QuestDB container...'); + questdbContainer = await new GenericContainer('questdb/questdb:7.3.10') + .withExposedPorts(9000, 8812, 9009) + .withEnvironment({ + 'QDB_TELEMETRY_ENABLED': 'false', + 'QDB_LOG_LEVEL': 'ERROR' + }) + .withStartupTimeout(60000) + .start(); + // Start PostgreSQL container + console.log('🐘 Starting PostgreSQL container...'); + postgresContainer = await new GenericContainer('postgres:15-alpine') + .withExposedPorts(5432) + .withEnvironment({ + 'POSTGRES_DB': 'trading_bot_test', + 'POSTGRES_USER': 'trading_admin', + 'POSTGRES_PASSWORD': 'trading_pass_test' + }) + .withStartupTimeout(60000) + .start(); + // Start MongoDB container + console.log('πŸƒ Starting MongoDB container...'); + mongoContainer = await new GenericContainer('mongo:7-jammy') + .withExposedPorts(27017) + .withEnvironment({ + 'MONGO_INITDB_ROOT_USERNAME': 'trading_admin', + 'MONGO_INITDB_ROOT_PASSWORD': 'trading_mongo_test', + 'MONGO_INITDB_DATABASE': 'trading_bot_test' + }) + .withStartupTimeout(60000) + .start(); + // Update environment variables for tests + process.env.QUESTDB_HOST = questdbContainer.getHost(); + 
process.env.QUESTDB_HTTP_PORT = questdbContainer.getMappedPort(9000).toString(); + process.env.QUESTDB_PG_PORT = questdbContainer.getMappedPort(8812).toString(); + process.env.QUESTDB_INFLUX_PORT = questdbContainer.getMappedPort(9009).toString(); + process.env.POSTGRES_HOST = postgresContainer.getHost(); + process.env.POSTGRES_PORT = postgresContainer.getMappedPort(5432).toString(); + process.env.MONGODB_HOST = mongoContainer.getHost(); + process.env.MONGODB_PORT = mongoContainer.getMappedPort(27017).toString(); + console.log('βœ… All containers started successfully!'); + console.log(`πŸ“Š QuestDB: http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}`); + console.log(`🐘 PostgreSQL: ${process.env.POSTGRES_HOST}:${process.env.POSTGRES_PORT}`); + console.log(`πŸƒ MongoDB: ${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}`); + } + catch (error) { + console.error('❌ Failed to start test containers:', error); + // Try to use MongoDB Memory Server as fallback + console.log('πŸ”„ Falling back to MongoDB Memory Server...'); + try { + mongoMemoryServer = await MongoMemoryServer.create({ + instance: { + dbName: 'trading_bot_test' + } + }); + const mongoUri = mongoMemoryServer.getUri(); + const mongoUrl = new URL(mongoUri); + process.env.MONGODB_HOST = mongoUrl.hostname; + process.env.MONGODB_PORT = mongoUrl.port; + process.env.MONGODB_URI = mongoUri; + console.log('βœ… MongoDB Memory Server started as fallback'); + } + catch (fallbackError) { + console.error('❌ Failed to start MongoDB Memory Server:', fallbackError); + throw fallbackError; + } + // For other databases, use localhost defaults if containers fail + if (!questdbContainer) { + console.log('⚠️ Using localhost QuestDB (ensure it\'s running)'); + process.env.QUESTDB_HOST = 'localhost'; + process.env.QUESTDB_HTTP_PORT = '9000'; + process.env.QUESTDB_PG_PORT = '8812'; + process.env.QUESTDB_INFLUX_PORT = '9009'; + } + if (!postgresContainer) { + console.log('⚠️ Using localhost PostgreSQL (ensure 
it\'s running)'); + process.env.POSTGRES_HOST = 'localhost'; + process.env.POSTGRES_PORT = '5432'; + } + } +}, 120000); // 2 minutes timeout for container startup +/** + * Global cleanup for integration tests + * Stops all test containers + */ +afterAll(async () => { + console.log('🧹 Cleaning up integration test containers...'); + const cleanup = async (container, name) => { + if (container) { + try { + await container.stop(); + console.log(`βœ… ${name} container stopped`); + } + catch (error) { + console.warn(`⚠️ Failed to stop ${name} container:`, error); + } + } + }; + await Promise.all([ + cleanup(questdbContainer, 'QuestDB'), + cleanup(postgresContainer, 'PostgreSQL'), + cleanup(mongoContainer, 'MongoDB') + ]); + if (mongoMemoryServer) { + try { + await mongoMemoryServer.stop(); + console.log('βœ… MongoDB Memory Server stopped'); + } + catch (error) { + console.warn('⚠️ Failed to stop MongoDB Memory Server:', error); + } + } + console.log('πŸŽ‰ Integration test cleanup complete!'); +}, 30000); +/** + * Wait for database services to be ready + */ +export const waitForServices = async (timeout = 30000) => { + const start = Date.now(); + while (Date.now() - start < timeout) { + try { + // Check if QuestDB HTTP interface is ready + const questdbUrl = `http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}/status`; + const response = await fetch(questdbUrl); + if (response.ok) { + console.log('βœ… QuestDB is ready'); + return; + } + } + catch (error) { + // Service not ready yet, continue waiting + } + await new Promise(resolve => setTimeout(resolve, 1000)); + } + throw new Error('Services did not become ready within timeout'); +}; +/** + * Test utilities for integration tests + */ +export const integrationTestHelpers = { + /** + * Get QuestDB HTTP URL + */ + getQuestDBUrl: () => `http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}`, + /** + * Get PostgreSQL connection string + */ + getPostgresUrl: () => 
`postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@${process.env.POSTGRES_HOST}:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}`, + /** + * Get MongoDB connection string + */ + getMongoUrl: () => { + if (process.env.MONGODB_URI) { + return process.env.MONGODB_URI; + } + return `mongodb://${process.env.MONGODB_USERNAME}:${process.env.MONGODB_PASSWORD}@${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}/${process.env.MONGODB_DATABASE}`; + }, + /** + * Wait for services to be ready + */ + waitForServices +}; //# sourceMappingURL=setup.js.map \ No newline at end of file diff --git a/test/integration/setup.ts b/test/integration/setup.ts index 856f71a..e448556 100644 --- a/test/integration/setup.ts +++ b/test/integration/setup.ts @@ -1,208 +1,208 @@ -/** - * Integration Test Setup - * - * Sets up test containers and real database instances for integration testing. - * This file is executed before integration tests run. - */ - -import { GenericContainer, StartedTestContainer } from 'testcontainers'; -import { MongoMemoryServer } from 'mongodb-memory-server'; - -let questdbContainer: StartedTestContainer; -let postgresContainer: StartedTestContainer; -let mongoContainer: StartedTestContainer; -let mongoMemoryServer: MongoMemoryServer; - -/** - * Global setup for integration tests - * Starts real database containers for testing - */ -beforeAll(async () => { - console.log('πŸš€ Starting integration test containers...'); - - try { - // Start QuestDB container - console.log('πŸ“Š Starting QuestDB container...'); - questdbContainer = await new GenericContainer('questdb/questdb:7.3.10') - .withExposedPorts(9000, 8812, 9009) - .withEnvironment({ - 'QDB_TELEMETRY_ENABLED': 'false', - 'QDB_LOG_LEVEL': 'ERROR' - }) - .withStartupTimeout(60000) - .start(); - - // Start PostgreSQL container - console.log('🐘 Starting PostgreSQL container...'); - postgresContainer = await new GenericContainer('postgres:15-alpine') - .withExposedPorts(5432) - 
.withEnvironment({ - 'POSTGRES_DB': 'trading_bot_test', - 'POSTGRES_USER': 'trading_admin', - 'POSTGRES_PASSWORD': 'trading_pass_test' - }) - .withStartupTimeout(60000) - .start(); - - // Start MongoDB container - console.log('πŸƒ Starting MongoDB container...'); - mongoContainer = await new GenericContainer('mongo:7-jammy') - .withExposedPorts(27017) - .withEnvironment({ - 'MONGO_INITDB_ROOT_USERNAME': 'trading_admin', - 'MONGO_INITDB_ROOT_PASSWORD': 'trading_mongo_test', - 'MONGO_INITDB_DATABASE': 'trading_bot_test' - }) - .withStartupTimeout(60000) - .start(); - - // Update environment variables for tests - process.env.QUESTDB_HOST = questdbContainer.getHost(); - process.env.QUESTDB_HTTP_PORT = questdbContainer.getMappedPort(9000).toString(); - process.env.QUESTDB_PG_PORT = questdbContainer.getMappedPort(8812).toString(); - process.env.QUESTDB_INFLUX_PORT = questdbContainer.getMappedPort(9009).toString(); - - process.env.POSTGRES_HOST = postgresContainer.getHost(); - process.env.POSTGRES_PORT = postgresContainer.getMappedPort(5432).toString(); - - process.env.MONGODB_HOST = mongoContainer.getHost(); - process.env.MONGODB_PORT = mongoContainer.getMappedPort(27017).toString(); - - console.log('βœ… All containers started successfully!'); - console.log(`πŸ“Š QuestDB: http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}`); - console.log(`🐘 PostgreSQL: ${process.env.POSTGRES_HOST}:${process.env.POSTGRES_PORT}`); - console.log(`πŸƒ MongoDB: ${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}`); - - } catch (error) { - console.error('❌ Failed to start test containers:', error); - - // Try to use MongoDB Memory Server as fallback - console.log('πŸ”„ Falling back to MongoDB Memory Server...'); - try { - mongoMemoryServer = await MongoMemoryServer.create({ - instance: { - dbName: 'trading_bot_test' - } - }); - - const mongoUri = mongoMemoryServer.getUri(); - const mongoUrl = new URL(mongoUri); - process.env.MONGODB_HOST = mongoUrl.hostname; - 
process.env.MONGODB_PORT = mongoUrl.port; - process.env.MONGODB_URI = mongoUri; - - console.log('βœ… MongoDB Memory Server started as fallback'); - } catch (fallbackError) { - console.error('❌ Failed to start MongoDB Memory Server:', fallbackError); - throw fallbackError; - } - - // For other databases, use localhost defaults if containers fail - if (!questdbContainer) { - console.log('⚠️ Using localhost QuestDB (ensure it\'s running)'); - process.env.QUESTDB_HOST = 'localhost'; - process.env.QUESTDB_HTTP_PORT = '9000'; - process.env.QUESTDB_PG_PORT = '8812'; - process.env.QUESTDB_INFLUX_PORT = '9009'; - } - - if (!postgresContainer) { - console.log('⚠️ Using localhost PostgreSQL (ensure it\'s running)'); - process.env.POSTGRES_HOST = 'localhost'; - process.env.POSTGRES_PORT = '5432'; - } - } -}, 120000); // 2 minutes timeout for container startup - -/** - * Global cleanup for integration tests - * Stops all test containers - */ -afterAll(async () => { - console.log('🧹 Cleaning up integration test containers...'); - - const cleanup = async (container: StartedTestContainer | undefined, name: string) => { - if (container) { - try { - await container.stop(); - console.log(`βœ… ${name} container stopped`); - } catch (error) { - console.warn(`⚠️ Failed to stop ${name} container:`, error); - } - } - }; - - await Promise.all([ - cleanup(questdbContainer, 'QuestDB'), - cleanup(postgresContainer, 'PostgreSQL'), - cleanup(mongoContainer, 'MongoDB') - ]); - - if (mongoMemoryServer) { - try { - await mongoMemoryServer.stop(); - console.log('βœ… MongoDB Memory Server stopped'); - } catch (error) { - console.warn('⚠️ Failed to stop MongoDB Memory Server:', error); - } - } - - console.log('πŸŽ‰ Integration test cleanup complete!'); -}, 30000); - -/** - * Wait for database services to be ready - */ -export const waitForServices = async (timeout: number = 30000): Promise => { - const start = Date.now(); - - while (Date.now() - start < timeout) { - try { - // Check if QuestDB HTTP 
interface is ready - const questdbUrl = `http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}/status`; - const response = await fetch(questdbUrl); - - if (response.ok) { - console.log('βœ… QuestDB is ready'); - return; - } - } catch (error) { - // Service not ready yet, continue waiting - } - - await new Promise(resolve => setTimeout(resolve, 1000)); - } - - throw new Error('Services did not become ready within timeout'); -}; - -/** - * Test utilities for integration tests - */ -export const integrationTestHelpers = { - /** - * Get QuestDB HTTP URL - */ - getQuestDBUrl: () => `http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}`, - - /** - * Get PostgreSQL connection string - */ - getPostgresUrl: () => - `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@${process.env.POSTGRES_HOST}:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}`, - - /** - * Get MongoDB connection string - */ - getMongoUrl: () => { - if (process.env.MONGODB_URI) { - return process.env.MONGODB_URI; - } - return `mongodb://${process.env.MONGODB_USERNAME}:${process.env.MONGODB_PASSWORD}@${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}/${process.env.MONGODB_DATABASE}`; - }, - - /** - * Wait for services to be ready - */ - waitForServices -}; +/** + * Integration Test Setup + * + * Sets up test containers and real database instances for integration testing. + * This file is executed before integration tests run. 
+ */ + +import { GenericContainer, StartedTestContainer } from 'testcontainers'; +import { MongoMemoryServer } from 'mongodb-memory-server'; + +let questdbContainer: StartedTestContainer; +let postgresContainer: StartedTestContainer; +let mongoContainer: StartedTestContainer; +let mongoMemoryServer: MongoMemoryServer; + +/** + * Global setup for integration tests + * Starts real database containers for testing + */ +beforeAll(async () => { + console.log('πŸš€ Starting integration test containers...'); + + try { + // Start QuestDB container + console.log('πŸ“Š Starting QuestDB container...'); + questdbContainer = await new GenericContainer('questdb/questdb:7.3.10') + .withExposedPorts(9000, 8812, 9009) + .withEnvironment({ + 'QDB_TELEMETRY_ENABLED': 'false', + 'QDB_LOG_LEVEL': 'ERROR' + }) + .withStartupTimeout(60000) + .start(); + + // Start PostgreSQL container + console.log('🐘 Starting PostgreSQL container...'); + postgresContainer = await new GenericContainer('postgres:15-alpine') + .withExposedPorts(5432) + .withEnvironment({ + 'POSTGRES_DB': 'trading_bot_test', + 'POSTGRES_USER': 'trading_admin', + 'POSTGRES_PASSWORD': 'trading_pass_test' + }) + .withStartupTimeout(60000) + .start(); + + // Start MongoDB container + console.log('πŸƒ Starting MongoDB container...'); + mongoContainer = await new GenericContainer('mongo:7-jammy') + .withExposedPorts(27017) + .withEnvironment({ + 'MONGO_INITDB_ROOT_USERNAME': 'trading_admin', + 'MONGO_INITDB_ROOT_PASSWORD': 'trading_mongo_test', + 'MONGO_INITDB_DATABASE': 'trading_bot_test' + }) + .withStartupTimeout(60000) + .start(); + + // Update environment variables for tests + process.env.QUESTDB_HOST = questdbContainer.getHost(); + process.env.QUESTDB_HTTP_PORT = questdbContainer.getMappedPort(9000).toString(); + process.env.QUESTDB_PG_PORT = questdbContainer.getMappedPort(8812).toString(); + process.env.QUESTDB_INFLUX_PORT = questdbContainer.getMappedPort(9009).toString(); + + process.env.POSTGRES_HOST = 
postgresContainer.getHost(); + process.env.POSTGRES_PORT = postgresContainer.getMappedPort(5432).toString(); + + process.env.MONGODB_HOST = mongoContainer.getHost(); + process.env.MONGODB_PORT = mongoContainer.getMappedPort(27017).toString(); + + console.log('βœ… All containers started successfully!'); + console.log(`πŸ“Š QuestDB: http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}`); + console.log(`🐘 PostgreSQL: ${process.env.POSTGRES_HOST}:${process.env.POSTGRES_PORT}`); + console.log(`πŸƒ MongoDB: ${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}`); + + } catch (error) { + console.error('❌ Failed to start test containers:', error); + + // Try to use MongoDB Memory Server as fallback + console.log('πŸ”„ Falling back to MongoDB Memory Server...'); + try { + mongoMemoryServer = await MongoMemoryServer.create({ + instance: { + dbName: 'trading_bot_test' + } + }); + + const mongoUri = mongoMemoryServer.getUri(); + const mongoUrl = new URL(mongoUri); + process.env.MONGODB_HOST = mongoUrl.hostname; + process.env.MONGODB_PORT = mongoUrl.port; + process.env.MONGODB_URI = mongoUri; + + console.log('βœ… MongoDB Memory Server started as fallback'); + } catch (fallbackError) { + console.error('❌ Failed to start MongoDB Memory Server:', fallbackError); + throw fallbackError; + } + + // For other databases, use localhost defaults if containers fail + if (!questdbContainer) { + console.log('⚠️ Using localhost QuestDB (ensure it\'s running)'); + process.env.QUESTDB_HOST = 'localhost'; + process.env.QUESTDB_HTTP_PORT = '9000'; + process.env.QUESTDB_PG_PORT = '8812'; + process.env.QUESTDB_INFLUX_PORT = '9009'; + } + + if (!postgresContainer) { + console.log('⚠️ Using localhost PostgreSQL (ensure it\'s running)'); + process.env.POSTGRES_HOST = 'localhost'; + process.env.POSTGRES_PORT = '5432'; + } + } +}, 120000); // 2 minutes timeout for container startup + +/** + * Global cleanup for integration tests + * Stops all test containers + */ +afterAll(async () => 
{ + console.log('🧹 Cleaning up integration test containers...'); + + const cleanup = async (container: StartedTestContainer | undefined, name: string) => { + if (container) { + try { + await container.stop(); + console.log(`βœ… ${name} container stopped`); + } catch (error) { + console.warn(`⚠️ Failed to stop ${name} container:`, error); + } + } + }; + + await Promise.all([ + cleanup(questdbContainer, 'QuestDB'), + cleanup(postgresContainer, 'PostgreSQL'), + cleanup(mongoContainer, 'MongoDB') + ]); + + if (mongoMemoryServer) { + try { + await mongoMemoryServer.stop(); + console.log('βœ… MongoDB Memory Server stopped'); + } catch (error) { + console.warn('⚠️ Failed to stop MongoDB Memory Server:', error); + } + } + + console.log('πŸŽ‰ Integration test cleanup complete!'); +}, 30000); + +/** + * Wait for database services to be ready + */ +export const waitForServices = async (timeout: number = 30000): Promise => { + const start = Date.now(); + + while (Date.now() - start < timeout) { + try { + // Check if QuestDB HTTP interface is ready + const questdbUrl = `http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}/status`; + const response = await fetch(questdbUrl); + + if (response.ok) { + console.log('βœ… QuestDB is ready'); + return; + } + } catch (error) { + // Service not ready yet, continue waiting + } + + await new Promise(resolve => setTimeout(resolve, 1000)); + } + + throw new Error('Services did not become ready within timeout'); +}; + +/** + * Test utilities for integration tests + */ +export const integrationTestHelpers = { + /** + * Get QuestDB HTTP URL + */ + getQuestDBUrl: () => `http://${process.env.QUESTDB_HOST}:${process.env.QUESTDB_HTTP_PORT}`, + + /** + * Get PostgreSQL connection string + */ + getPostgresUrl: () => + `postgresql://${process.env.POSTGRES_USER}:${process.env.POSTGRES_PASSWORD}@${process.env.POSTGRES_HOST}:${process.env.POSTGRES_PORT}/${process.env.POSTGRES_DB}`, + + /** + * Get MongoDB connection string + */ + 
getMongoUrl: () => { + if (process.env.MONGODB_URI) { + return process.env.MONGODB_URI; + } + return `mongodb://${process.env.MONGODB_USERNAME}:${process.env.MONGODB_PASSWORD}@${process.env.MONGODB_HOST}:${process.env.MONGODB_PORT}/${process.env.MONGODB_DATABASE}`; + }, + + /** + * Wait for services to be ready + */ + waitForServices +}; diff --git a/test/setup.js b/test/setup.js index cf52c79..5f67360 100644 --- a/test/setup.js +++ b/test/setup.js @@ -1,112 +1,112 @@ -"use strict"; -/** - * Bun Test Setup File for Stock Bot Trading Platform - * - * Global test configuration and utilities available across all tests. - * This file is executed before each test via bunfig.toml preload. - */ -// Increase test timeout if needed (already configured in bunfig.toml) -// Bun.timeout = 30000; -// Store original console methods to allow restoration -const originalConsole = global.console; -// Mock console methods to reduce noise during tests -// These can be restored with testHelpers.restoreConsole() -console.log = () => { }; -console.debug = () => { }; -console.info = () => { }; -console.warn = () => { }; -console.error = () => { }; -global.testHelpers = { - /** - * Sleep utility for async tests - */ - sleep: (ms) => new Promise(resolve => setTimeout(resolve, ms)), - /** - * Consistent mock timestamp for tests - */ - mockTimestamp: () => new Date('2024-01-01T12:00:00Z'), - /** - * Generate test OHLCV data - */ - generateTestOHLCV: (symbol = 'AAPL', overrides = {}) => ({ - symbol, - open: 150.0, - high: 153.0, - low: 149.0, - close: 152.5, - volume: 10000, - timestamp: global.testHelpers.mockTimestamp(), - ...overrides - }), - /** - * Generate test trade data - */ - generateTestTrade: (symbol = 'AAPL', overrides = {}) => ({ - symbol, - price: 152.5, - size: 100, - timestamp: global.testHelpers.mockTimestamp(), - exchange: 'NASDAQ', - conditions: ['@', 'T'], - ...overrides - }), - /** - * Generate test quote data - */ - generateTestQuote: (symbol = 'AAPL', overrides = {}) => 
({ - symbol, - bidPrice: 152.45, - bidSize: 200, - askPrice: 152.55, - askSize: 150, - timestamp: global.testHelpers.mockTimestamp(), - ...overrides - }), - /** - * Create a mock logger - */ - mockLogger: () => ({ - debug: () => { }, - info: () => { }, - warn: () => { }, - error: () => { }, - critical: () => { }, - }), - /** - * Restore console methods - */ - restoreConsole: () => { - global.console = originalConsole; - } -}; -// Set up spyOn utilities -// Similar to jest.spyOn but using Bun's built-in spy functionality -// This makes it easier to migrate from Jest -global.spyOn = function (object, method) { - const original = object[method]; - const mock = function (...args) { - mock.mock.calls.push(args); - return mock.mockImplementation ? mock.mockImplementation(...args) : original.apply(object, args); - }; - mock.mock = { calls: [] }; - mock.mockClear = () => { mock.mock.calls = []; return mock; }; - mock.mockReset = () => { - mock.mock.calls = []; - mock.mockImplementation = null; - return mock; - }; - mock.mockImplementation = null; - mock.mockReturnValue = (value) => { - mock.mockImplementation = () => value; - return mock; - }; - mock.mockResolvedValue = (value) => { - return mock.mockReturnValue(Promise.resolve(value)); - }; - mock.mockRejectedValue = (value) => { - return mock.mockReturnValue(Promise.reject(value)); - }; - object[method] = mock; - return mock; -}; +"use strict"; +/** + * Bun Test Setup File for Stock Bot Trading Platform + * + * Global test configuration and utilities available across all tests. + * This file is executed before each test via bunfig.toml preload. 
+ */ +// Increase test timeout if needed (already configured in bunfig.toml) +// Bun.timeout = 30000; +// Store original console methods to allow restoration +const originalConsole = global.console; +// Mock console methods to reduce noise during tests +// These can be restored with testHelpers.restoreConsole() +console.log = () => { }; +console.debug = () => { }; +console.info = () => { }; +console.warn = () => { }; +console.error = () => { }; +global.testHelpers = { + /** + * Sleep utility for async tests + */ + sleep: (ms) => new Promise(resolve => setTimeout(resolve, ms)), + /** + * Consistent mock timestamp for tests + */ + mockTimestamp: () => new Date('2024-01-01T12:00:00Z'), + /** + * Generate test OHLCV data + */ + generateTestOHLCV: (symbol = 'AAPL', overrides = {}) => ({ + symbol, + open: 150.0, + high: 153.0, + low: 149.0, + close: 152.5, + volume: 10000, + timestamp: global.testHelpers.mockTimestamp(), + ...overrides + }), + /** + * Generate test trade data + */ + generateTestTrade: (symbol = 'AAPL', overrides = {}) => ({ + symbol, + price: 152.5, + size: 100, + timestamp: global.testHelpers.mockTimestamp(), + exchange: 'NASDAQ', + conditions: ['@', 'T'], + ...overrides + }), + /** + * Generate test quote data + */ + generateTestQuote: (symbol = 'AAPL', overrides = {}) => ({ + symbol, + bidPrice: 152.45, + bidSize: 200, + askPrice: 152.55, + askSize: 150, + timestamp: global.testHelpers.mockTimestamp(), + ...overrides + }), + /** + * Create a mock logger + */ + mockLogger: () => ({ + debug: () => { }, + info: () => { }, + warn: () => { }, + error: () => { }, + critical: () => { }, + }), + /** + * Restore console methods + */ + restoreConsole: () => { + global.console = originalConsole; + } +}; +// Set up spyOn utilities +// Similar to jest.spyOn but using Bun's built-in spy functionality +// This makes it easier to migrate from Jest +global.spyOn = function (object, method) { + const original = object[method]; + const mock = function (...args) { + 
mock.mock.calls.push(args); + return mock.mockImplementation ? mock.mockImplementation(...args) : original.apply(object, args); + }; + mock.mock = { calls: [] }; + mock.mockClear = () => { mock.mock.calls = []; return mock; }; + mock.mockReset = () => { + mock.mock.calls = []; + mock.mockImplementation = null; + return mock; + }; + mock.mockImplementation = null; + mock.mockReturnValue = (value) => { + mock.mockImplementation = () => value; + return mock; + }; + mock.mockResolvedValue = (value) => { + return mock.mockReturnValue(Promise.resolve(value)); + }; + mock.mockRejectedValue = (value) => { + return mock.mockReturnValue(Promise.reject(value)); + }; + object[method] = mock; + return mock; +}; //# sourceMappingURL=setup.js.map \ No newline at end of file diff --git a/test/setup.ts b/test/setup.ts index 0bfb8d4..9c78686 100644 --- a/test/setup.ts +++ b/test/setup.ts @@ -1,136 +1,136 @@ -/** - * Bun Test Setup File for Stock Bot Trading Platform - * - * Global test configuration and utilities available across all tests. - * This file is executed before each test via bunfig.toml preload. 
- */ - -// Increase test timeout if needed (already configured in bunfig.toml) -// Bun.timeout = 30000; - -// Store original console methods to allow restoration -const originalConsole = global.console; - -// Mock console methods to reduce noise during tests -// These can be restored with testHelpers.restoreConsole() -console.log = () => {}; -console.debug = () => {}; -console.info = () => {}; -console.warn = () => {}; -console.error = () => {}; - -// Global test utilities available in all test files -declare global { - var testHelpers: { - sleep: (ms: number) => Promise; - mockTimestamp: () => Date; - generateTestOHLCV: (symbol?: string, overrides?: any) => any; - generateTestTrade: (symbol?: string, overrides?: any) => any; - generateTestQuote: (symbol?: string, overrides?: any) => any; - mockLogger: () => any; - restoreConsole: () => void; - }; -} - -global.testHelpers = { - /** - * Sleep utility for async tests - */ - sleep: (ms: number) => new Promise(resolve => setTimeout(resolve, ms)), - - /** - * Consistent mock timestamp for tests - */ - mockTimestamp: () => new Date('2024-01-01T12:00:00Z'), - - /** - * Generate test OHLCV data - */ - generateTestOHLCV: (symbol = 'AAPL', overrides = {}) => ({ - symbol, - open: 150.0, - high: 153.0, - low: 149.0, - close: 152.5, - volume: 10000, - timestamp: global.testHelpers.mockTimestamp(), - ...overrides - }), - - /** - * Generate test trade data - */ - generateTestTrade: (symbol = 'AAPL', overrides = {}) => ({ - symbol, - price: 152.5, - size: 100, - timestamp: global.testHelpers.mockTimestamp(), - exchange: 'NASDAQ', - conditions: ['@', 'T'], - ...overrides - }), - - /** - * Generate test quote data - */ - generateTestQuote: (symbol = 'AAPL', overrides = {}) => ({ - symbol, - bidPrice: 152.45, - bidSize: 200, - askPrice: 152.55, - askSize: 150, - timestamp: global.testHelpers.mockTimestamp(), - ...overrides - }), - - /** - * Create a mock logger - */ - mockLogger: () => ({ - debug: () => {}, - info: () => {}, - warn: 
() => {}, - error: () => {}, - critical: () => {}, - }), - - /** - * Restore console methods - */ - restoreConsole: () => { - global.console = originalConsole; - } -}; - -// Set up spyOn utilities -// Similar to jest.spyOn but using Bun's built-in spy functionality -// This makes it easier to migrate from Jest -global.spyOn = function(object: any, method: string) { - const original = object[method]; - const mock = function(...args: any[]) { - mock.mock.calls.push(args); - return mock.mockImplementation ? mock.mockImplementation(...args) : original.apply(object, args); - }; - - mock.mock = { calls: [] }; - mock.mockClear = () => { mock.mock.calls = []; return mock; }; - mock.mockReset = () => { - mock.mock.calls = []; - mock.mockImplementation = null; - return mock; - }; - mock.mockImplementation = null; - mock.mockReturnValue = (value: any) => { - mock.mockImplementation = () => value; - return mock; - }; - mock.mockResolvedValue = (value: any) => { - return mock.mockReturnValue(Promise.resolve(value)); - }; - mock.mockRejectedValue = (value: any) => { - return mock.mockReturnValue(Promise.reject(value)); - }; - - object[method] = mock; - return mock; -}; +/** + * Bun Test Setup File for Stock Bot Trading Platform + * + * Global test configuration and utilities available across all tests. + * This file is executed before each test via bunfig.toml preload. 
+ */ + +// Increase test timeout if needed (already configured in bunfig.toml) +// Bun.timeout = 30000; + +// Store original console methods to allow restoration +const originalConsole = global.console; + +// Mock console methods to reduce noise during tests +// These can be restored with testHelpers.restoreConsole() +console.log = () => {}; +console.debug = () => {}; +console.info = () => {}; +console.warn = () => {}; +console.error = () => {}; + +// Global test utilities available in all test files +declare global { + var testHelpers: { + sleep: (ms: number) => Promise; + mockTimestamp: () => Date; + generateTestOHLCV: (symbol?: string, overrides?: any) => any; + generateTestTrade: (symbol?: string, overrides?: any) => any; + generateTestQuote: (symbol?: string, overrides?: any) => any; + mockLogger: () => any; + restoreConsole: () => void; + }; +} + +global.testHelpers = { + /** + * Sleep utility for async tests + */ + sleep: (ms: number) => new Promise(resolve => setTimeout(resolve, ms)), + + /** + * Consistent mock timestamp for tests + */ + mockTimestamp: () => new Date('2024-01-01T12:00:00Z'), + + /** + * Generate test OHLCV data + */ + generateTestOHLCV: (symbol = 'AAPL', overrides = {}) => ({ + symbol, + open: 150.0, + high: 153.0, + low: 149.0, + close: 152.5, + volume: 10000, + timestamp: global.testHelpers.mockTimestamp(), + ...overrides + }), + + /** + * Generate test trade data + */ + generateTestTrade: (symbol = 'AAPL', overrides = {}) => ({ + symbol, + price: 152.5, + size: 100, + timestamp: global.testHelpers.mockTimestamp(), + exchange: 'NASDAQ', + conditions: ['@', 'T'], + ...overrides + }), + + /** + * Generate test quote data + */ + generateTestQuote: (symbol = 'AAPL', overrides = {}) => ({ + symbol, + bidPrice: 152.45, + bidSize: 200, + askPrice: 152.55, + askSize: 150, + timestamp: global.testHelpers.mockTimestamp(), + ...overrides + }), + + /** + * Create a mock logger + */ + mockLogger: () => ({ + debug: () => {}, + info: () => {}, + warn: 
() => {}, + error: () => {}, + critical: () => {}, + }), + + /** + * Restore console methods + */ + restoreConsole: () => { + global.console = originalConsole; + } +}; + +// Set up spyOn utilities +// Similar to jest.spyOn but using Bun's built-in spy functionality +// This makes it easier to migrate from Jest +global.spyOn = function(object: any, method: string) { + const original = object[method]; + const mock = function(...args: any[]) { + mock.mock.calls.push(args); + return mock.mockImplementation ? mock.mockImplementation(...args) : original.apply(object, args); + }; + + mock.mock = { calls: [] }; + mock.mockClear = () => { mock.mock.calls = []; return mock; }; + mock.mockReset = () => { + mock.mock.calls = []; + mock.mockImplementation = null; + return mock; + }; + mock.mockImplementation = null; + mock.mockReturnValue = (value: any) => { + mock.mockImplementation = () => value; + return mock; + }; + mock.mockResolvedValue = (value: any) => { + return mock.mockReturnValue(Promise.resolve(value)); + }; + mock.mockRejectedValue = (value: any) => { + return mock.mockReturnValue(Promise.reject(value)); + }; + + object[method] = mock; + return mock; +}; diff --git a/tsconfig.app.json b/tsconfig.app.json index c4a9fd6..047d2df 100644 --- a/tsconfig.app.json +++ b/tsconfig.app.json @@ -1,10 +1,10 @@ -{ - "extends": "../../../tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src", - "types": ["bun-types"] - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist"] -} +{ + "extends": "../../../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src", + "types": ["bun-types"] + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +} diff --git a/tsconfig.json b/tsconfig.json index afccebc..a0dc721 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,71 +1,71 @@ -{ - "$schema": "https://json.schemastore.org/tsconfig", - "compilerOptions": { - // JavaScript output target version - "target": 
"ES2022", - // Module configuration for different project types - "module": "ESNext", - "moduleResolution": "bundler", - "composite": true, - - // Type checking - "strict": true, - "noImplicitAny": true, - "strictNullChecks": true, - "noImplicitThis": true, - "alwaysStrict": true, - "declarationMap": true, - - // Module interoperability - "esModuleInterop": true, - "allowSyntheticDefaultImports": true, - - // Additional features - "skipLibCheck": true, - "forceConsistentCasingInFileNames": true, - "resolveJsonModule": true, - "sourceMap": false, - "declaration": true, - "disableReferencedProjectLoad": true, - "disableSourceOfProjectReferenceRedirect": false, - - // Paths and output - "baseUrl": ".", - "paths": { - "@stock-bot/*": ["libs/*/src"] - } - }, - "exclude": [ - "node_modules", - "dist" - ], - "references": [ - // Core libraries first - { "path": "./libs/types" }, - { "path": "./libs/config" }, - { "path": "./libs/logger" }, - { "path": "./libs/utils" }, - - // Database clients - { "path": "./libs/postgres-client" }, - { "path": "./libs/mongodb-client" }, - { "path": "./libs/questdb-client" }, - - // Service libraries - { "path": "./libs/cache" }, - { "path": "./libs/http" }, - { "path": "./libs/event-bus" }, - { "path": "./libs/shutdown" }, - // Engine libraries - { "path": "./libs/data-frame" }, - { "path": "./libs/vector-engine" }, - { "path": "./libs/strategy-engine" }, - - // Applications - { "path": "./apps/data-service" }, - { "path": "./apps/execution-service" }, - { "path": "./apps/portfolio-service" }, - { "path": "./apps/processing-service" }, - { "path": "./apps/strategy-service" } - ] +{ + "$schema": "https://json.schemastore.org/tsconfig", + "compilerOptions": { + // JavaScript output target version + "target": "ES2022", + // Module configuration for different project types + "module": "ESNext", + "moduleResolution": "bundler", + "composite": true, + + // Type checking + "strict": true, + "noImplicitAny": true, + "strictNullChecks": true, + 
"noImplicitThis": true, + "alwaysStrict": true, + "declarationMap": true, + + // Module interoperability + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + + // Additional features + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "sourceMap": false, + "declaration": true, + "disableReferencedProjectLoad": true, + "disableSourceOfProjectReferenceRedirect": false, + + // Paths and output + "baseUrl": ".", + "paths": { + "@stock-bot/*": ["libs/*/src"] + } + }, + "exclude": [ + "node_modules", + "dist" + ], + "references": [ + // Core libraries first + { "path": "./libs/types" }, + { "path": "./libs/config" }, + { "path": "./libs/logger" }, + { "path": "./libs/utils" }, + + // Database clients + { "path": "./libs/postgres-client" }, + { "path": "./libs/mongodb-client" }, + { "path": "./libs/questdb-client" }, + + // Service libraries + { "path": "./libs/cache" }, + { "path": "./libs/http" }, + { "path": "./libs/event-bus" }, + { "path": "./libs/shutdown" }, + // Engine libraries + { "path": "./libs/data-frame" }, + { "path": "./libs/vector-engine" }, + { "path": "./libs/strategy-engine" }, + + // Applications + { "path": "./apps/data-service" }, + { "path": "./apps/execution-service" }, + { "path": "./apps/portfolio-service" }, + { "path": "./apps/processing-service" }, + { "path": "./apps/strategy-service" } + ] } \ No newline at end of file diff --git a/tsconfig.lib.json b/tsconfig.lib.json index 090238f..15018aa 100644 --- a/tsconfig.lib.json +++ b/tsconfig.lib.json @@ -1,11 +1,11 @@ -{ - "extends": "./tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "rootDir": "./src", - "declaration": true, - "composite": true - }, - "include": ["src/**/*"], - "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"] -} +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./src", + "declaration": true, + "composite": true + }, + "include": 
["src/**/*"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"] +} diff --git a/turbo.json b/turbo.json index 0f97c13..478f1a8 100644 --- a/turbo.json +++ b/turbo.json @@ -1,47 +1,47 @@ -{ - "$schema": "https://turbo.build/schema.json", - "ui": "tui", - "globalDependencies": ["**/.env.*local"], - "tasks": { - "build": { - "dependsOn": ["^build"], - "outputs": ["dist/**", ".next/**", "!.next/cache/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - }, - "build:libs": { - "dependsOn": [], - "outputs": ["dist/**"], - "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] - }, - "dev": { - "dependsOn": ["^build"], - "cache": false, - "persistent": true - }, - "test": { - "dependsOn": ["build"], - "outputs": [] - }, - "lint": { - "dependsOn": ["^lint"] - }, - "clean": { - "cache": false - }, - "start": { - "dependsOn": ["build"], - "cache": false, - "persistent": true - }, - "backtest": { - "dependsOn": ["build"], - "cache": false - } - }, - "globalEnv": [ - "NODE_ENV", - "DATA_SERVICE_PORT", - "DRAGONFLY_HOST", - "DRAGONFLY_PORT" - ] -} +{ + "$schema": "https://turbo.build/schema.json", + "ui": "tui", + "globalDependencies": ["**/.env.*local"], + "tasks": { + "build": { + "dependsOn": ["^build"], + "outputs": ["dist/**", ".next/**", "!.next/cache/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + }, + "build:libs": { + "dependsOn": [], + "outputs": ["dist/**"], + "inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"] + }, + "dev": { + "dependsOn": ["^build"], + "cache": false, + "persistent": true + }, + "test": { + "dependsOn": ["build"], + "outputs": [] + }, + "lint": { + "dependsOn": 
["^lint"] + }, + "clean": { + "cache": false + }, + "start": { + "dependsOn": ["build"], + "cache": false, + "persistent": true + }, + "backtest": { + "dependsOn": ["build"], + "cache": false + } + }, + "globalEnv": [ + "NODE_ENV", + "DATA_SERVICE_PORT", + "DRAGONFLY_HOST", + "DRAGONFLY_PORT" + ] +}