deleted a lot of the stuff

This commit is contained in:
Bojan Kucera 2025-06-04 10:50:05 -04:00
parent d22f7aafa0
commit 3e451558ac
173 changed files with 1313 additions and 30205 deletions

9
.gitignore vendored
View file

@@ -99,9 +99,12 @@ ehthumbs.db
Thumbs.db
# Trading bot specific
data/
backtest-results/
logs/
.data/
.backtest-results/
.logs/
.old/
.mongo/
.chat/
*.db
*.sqlite
*.sqlite3

View file

@@ -1,703 +0,0 @@
# 🏗️ Stock Bot Trading System - Architecture Documentation
## 📋 Table of Contents
- [System Overview](#system-overview)
- [Current Architecture](#current-architecture)
- [Service Breakdown](#service-breakdown)
- [Data Flow](#data-flow)
- [Technology Stack](#technology-stack)
- [Future Architecture](#future-architecture)
- [Improvement Recommendations](#improvement-recommendations)
- [Deployment Architecture](#deployment-architecture)
- [Security Architecture](#security-architecture)
- [Monitoring & Observability](#monitoring--observability)
---
## 🎯 System Overview
The Stock Bot Trading System is a **microservice-based**, **event-driven** trading platform built for **real-time market analysis**, **strategy execution**, and **risk management**. The system follows a **service-oriented architecture (SOA)** with **clear separation of concerns** and **horizontal scalability**.
### Core Principles
- **Microservices Architecture**: Independent, deployable services
- **Event-Driven Communication**: WebSocket and Redis pub/sub
- **Real-Time Processing**: Sub-second latency requirements
- **Scalable Design**: Horizontal scaling capabilities
- **Fault Tolerance**: Circuit breakers and graceful degradation
- **Type Safety**: Full TypeScript implementation
---
## 🏗️ Current Architecture
```mermaid
graph TB
subgraph "Frontend Layer"
UI[Angular Trading Dashboard]
end
subgraph "API Gateway Layer"
GW[API Gateway - Future]
end
subgraph "Core Services"
MDG[Market Data Gateway<br/>Port 3001]
RG[Risk Guardian<br/>Port 3002]
SO[Strategy Orchestrator<br/>Port 4001]
end
subgraph "Data Layer"
Redis[(Redis Cache)]
PG[(PostgreSQL)]
QDB[(QuestDB)]
Mongo[(MongoDB)]
end
subgraph "External APIs"
Alpha[Alpha Vantage]
IEX[IEX Cloud]
Yahoo[Yahoo Finance]
end
UI -->|WebSocket/HTTP| MDG
UI -->|WebSocket/HTTP| RG
UI -->|WebSocket/HTTP| SO
MDG --> Redis
MDG --> QDB
MDG -->|Fetch Data| Alpha
MDG -->|Fetch Data| IEX
MDG -->|Fetch Data| Yahoo
RG --> Redis
RG --> PG
SO --> Redis
SO --> PG
SO --> Mongo
```
---
## 🔧 Service Breakdown
### **1. Interface Services**
#### **Trading Dashboard** (`apps/interface-services/trading-dashboard`)
- **Framework**: Angular 20 + Angular Material + Tailwind CSS
- **Port**: 4200 (development)
- **Purpose**: Real-time trading interface and strategy management
- **Key Features**:
- Real-time market data visualization
- Strategy creation and backtesting UI
- Risk management dashboard
- Portfolio monitoring
- WebSocket integration for live updates
**Current Structure:**
```
trading-dashboard/
├── src/
│ ├── app/
│ │ ├── components/ # Reusable UI components
│ │ │ ├── sidebar/ # Navigation sidebar
│ │ │ └── notifications/ # Alert system
│ │ ├── pages/ # Route-based pages
│ │ │ ├── dashboard/ # Main trading dashboard
│ │ │ ├── market-data/ # Market data visualization
│ │ │ ├── portfolio/ # Portfolio management
│ │ │ ├── strategies/ # Strategy management
│ │ │ └── risk-management/ # Risk controls
│ │ ├── services/ # Angular services
│ │ │ ├── api.service.ts # HTTP API communication
│ │ │ ├── websocket.service.ts # WebSocket management
│ │ │ └── strategy.service.ts # Strategy operations
│ │ └── shared/ # Shared utilities
│ └── styles.css # Global styles
```
### **2. Core Services**
#### **Market Data Gateway** (`apps/core-services/market-data-gateway`)
- **Framework**: Hono + Bun
- **Port**: 3001
- **Purpose**: Market data aggregation and real-time distribution
- **Database**: QuestDB (time-series), Redis (caching)
**Responsibilities:**
- Aggregate data from multiple market data providers
- Real-time WebSocket streaming to clients
- Historical data storage and retrieval
- Rate limiting and API management
- Data normalization and validation
**API Endpoints:**
```
GET /health # Health check
GET /api/market-data/:symbol # Get latest market data
GET /api/historical/:symbol # Get historical data
WS /ws # WebSocket for real-time data
```
#### **Risk Guardian** (`apps/core-services/risk-guardian`)
- **Framework**: Hono + Bun
- **Port**: 3002
- **Purpose**: Real-time risk monitoring and controls
- **Database**: PostgreSQL (persistent), Redis (real-time)
**Responsibilities:**
- Position size monitoring
- Daily loss tracking
- Portfolio risk assessment
- Volatility monitoring
- Real-time risk alerts
- Risk threshold management
**API Endpoints:**
```
GET /health # Health check
GET /api/risk/thresholds # Get risk thresholds
PUT /api/risk/thresholds # Update risk thresholds
POST /api/risk/evaluate # Evaluate position risk
GET /api/risk/history # Risk evaluation history
WS /ws # WebSocket for risk alerts
```
#### **Strategy Orchestrator** (`apps/intelligence-services/strategy-orchestrator`)
- **Framework**: Hono + Bun
- **Port**: 4001
- **Purpose**: Strategy lifecycle management and execution
- **Database**: MongoDB (strategies), PostgreSQL (trades), Redis (signals)
**Responsibilities:**
- Strategy creation and management
- Backtesting engine (vectorized & event-based)
- Real-time strategy execution
- Signal generation and broadcasting
- Performance analytics
- Strategy optimization
**Current Structure:**
```
strategy-orchestrator/
├── src/
│ ├── core/
│ │ ├── backtesting/
│ │ │ ├── BacktestEngine.ts # Main backtesting engine
│ │ │ ├── BacktestService.ts # Backtesting service layer
│ │ │ ├── MarketDataFeed.ts # Historical data provider
│ │ │ └── PerformanceAnalytics.ts # Performance metrics
│ │ ├── execution/
│ │ │ └── StrategyExecutionService.ts # Real-time execution
│ │ ├── strategies/
│ │ │ ├── Strategy.ts # Base strategy interface
│ │ │ ├── StrategyRegistry.ts # Strategy management
│ │ │ ├── BaseStrategy.ts # Abstract base class
│ │ │ ├── VectorizedStrategy.ts # Vectorized base class
│ │ │ ├── MovingAverageCrossover.ts # MA strategy
│ │ │ └── MeanReversionStrategy.ts # Mean reversion
│ │ └── indicators/
│ │ └── TechnicalIndicators.ts # Technical analysis
│ ├── controllers/
│ │ └── StrategyController.ts # API endpoints
│ └── index.ts # Main entry point
```
**API Endpoints:**
```
GET /health # Health check
GET /api/strategies # List strategies
POST /api/strategies # Create strategy
PUT /api/strategies/:id # Update strategy
POST /api/strategies/:id/:action # Start/stop/pause strategy
GET /api/strategies/:id/signals # Get strategy signals
POST /api/strategies/:id/backtest # Run backtest
GET /api/strategies/:id/performance # Get performance metrics
WS /ws # WebSocket for strategy updates
```
### **3. Shared Packages**
#### **Shared Types** (`packages/types`)
```typescript
export interface MarketData {
symbol: string;
price: number;
volume: number;
timestamp: Date;
bid: number;
ask: number;
}
export interface Strategy {
id: string;
name: string;
symbols: string[];
parameters: Record<string, any>;
status: 'ACTIVE' | 'PAUSED' | 'STOPPED';
}
export interface BacktestResult {
totalReturn: number;
sharpeRatio: number;
maxDrawdown: number;
winRate: number;
totalTrades: number;
}
```
#### **Configuration** (`packages/config`)
```typescript
export const config = {
redis: {
host: process.env.REDIS_HOST || 'localhost',
port: parseInt(process.env.REDIS_PORT || '6379')
},
database: {
postgres: process.env.POSTGRES_URL,
questdb: process.env.QUESTDB_URL,
mongodb: process.env.MONGODB_URL
},
marketData: {
alphaVantageKey: process.env.ALPHA_VANTAGE_KEY,
iexKey: process.env.IEX_KEY
}
};
```
---
## 🔄 Data Flow
### **Real-Time Market Data Flow**
```mermaid
sequenceDiagram
participant EXT as External APIs
participant MDG as Market Data Gateway
participant Redis as Redis Cache
participant QDB as QuestDB
participant UI as Trading Dashboard
EXT->>MDG: Market data feed
MDG->>Redis: Cache latest prices
MDG->>QDB: Store historical data
MDG->>UI: WebSocket broadcast
UI->>UI: Update charts/tables
```
### **Strategy Execution Flow**
```mermaid
sequenceDiagram
participant UI as Trading Dashboard
participant SO as Strategy Orchestrator
participant MDG as Market Data Gateway
participant RG as Risk Guardian
participant Redis as Redis
UI->>SO: Start strategy
SO->>MDG: Subscribe to market data
MDG->>SO: Real-time price updates
SO->>SO: Generate trading signals
SO->>RG: Risk evaluation
RG->>SO: Risk approval/rejection
SO->>Redis: Store signals
SO->>UI: WebSocket signal broadcast
```
---
## 💻 Technology Stack
### **Backend**
- **Runtime**: Bun (ultra-fast JavaScript runtime)
- **Web Framework**: Hono (lightweight, fast web framework)
- **Language**: TypeScript (type safety)
- **Build Tool**: Turbo (monorepo management)
### **Frontend**
- **Framework**: Angular 20 (latest stable)
- **UI Library**: Angular Material + Tailwind CSS
- **State Management**: Angular Signals (reactive programming)
- **WebSocket**: Native WebSocket API
### **Databases**
- **Time-Series**: QuestDB (market data storage)
- **Relational**: PostgreSQL (structured data)
- **Document**: MongoDB (strategy configurations)
- **Cache/Pub-Sub**: Redis (real-time data)
### **Infrastructure**
- **Containerization**: Docker + Docker Compose
- **Process Management**: PM2 (production)
- **Monitoring**: Built-in health checks
- **Development**: Hot reload, TypeScript compilation
---
## 🚀 Future Architecture
### **Phase 1: Enhanced Microservices (Q2 2025)**
```mermaid
graph TB
subgraph "API Gateway Layer"
GW[Kong/Envoy API Gateway]
LB[Load Balancer]
end
subgraph "Authentication"
AUTH[Auth Service<br/>JWT + OAuth]
end
subgraph "Core Services"
MDG[Market Data Gateway]
RG[Risk Guardian]
SO[Strategy Orchestrator]
OE[Order Execution Engine]
NS[Notification Service]
end
subgraph "Analytics Services"
BA[Backtest Analytics]
PA[Performance Analytics]
ML[ML Prediction Service]
end
subgraph "Message Queue"
NATS[NATS/Apache Kafka]
end
```
### **Phase 2: Machine Learning Integration (Q3 2025)**
- **ML Pipeline**: Python-based ML services
- **Feature Engineering**: Real-time feature computation
- **Model Training**: Automated model retraining
- **Prediction API**: Real-time predictions
### **Phase 3: Multi-Asset Support (Q4 2025)**
- **Crypto Trading**: Binance, Coinbase integration
- **Forex Trading**: OANDA, FXCM integration
- **Options Trading**: Interactive Brokers integration
- **Futures Trading**: CME, ICE integration
---
## 📈 Improvement Recommendations
### **1. High Priority Improvements**
#### **API Gateway Implementation**
```typescript
// Implement Kong or Envoy for:
- Rate limiting per service
- Authentication/authorization
- Request/response transformation
- Circuit breaker patterns
- Load balancing
```
#### **Enhanced Error Handling**
```typescript
// Implement structured error handling:
interface ServiceError {
code: string;
message: string;
service: string;
timestamp: Date;
correlationId: string;
}
```
#### **Comprehensive Logging**
```typescript
// Implement structured logging:
interface LogEntry {
level: 'debug' | 'info' | 'warn' | 'error';
service: string;
message: string;
metadata: Record<string, any>;
timestamp: Date;
correlationId: string;
}
```
### **2. Medium Priority Improvements**
#### **Database Optimization**
```sql
-- QuestDB optimizations for market data:
CREATE TABLE market_data (
symbol SYMBOL,
timestamp TIMESTAMP,
price DOUBLE,
volume DOUBLE,
bid DOUBLE,
ask DOUBLE
) timestamp(timestamp) PARTITION BY DAY;
-- QuestDB does not support CREATE INDEX; index the SYMBOL column instead:
ALTER TABLE market_data ALTER COLUMN symbol ADD INDEX;
```
#### **Caching Strategy**
```typescript
// Implement multi-layer caching:
interface CacheStrategy {
L1: Map<string, any>; // In-memory cache
L2: Redis; // Distributed cache
L3: Database; // Persistent storage
}
```
#### **WebSocket Optimization**
```typescript
// Implement WebSocket connection pooling:
interface WSConnectionPool {
connections: Map<string, WebSocket[]>;
balancer: RoundRobinBalancer;
heartbeat: HeartbeatManager;
}
```
### **3. Low Priority Improvements**
#### **Code Quality**
- Implement comprehensive unit tests (>90% coverage)
- Add integration tests for all services
- Implement E2E tests for critical user flows
- Add performance benchmarks
#### **Documentation**
- API documentation with OpenAPI/Swagger
- Developer onboarding guide
- Deployment runbooks
- Architecture decision records (ADRs)
---
## 🏗️ Deployment Architecture
### **Development Environment**
```yaml
# docker-compose.dev.yml
version: '3.8'
services:
# Databases
postgres:
image: postgres:15
ports: ["5432:5432"]
redis:
image: redis:7-alpine
ports: ["6379:6379"]
questdb:
image: questdb/questdb:latest
ports: ["9000:9000", "8812:8812"]
mongodb:
image: mongo:6
ports: ["27017:27017"]
# Services
market-data-gateway:
build: ./apps/core-services/market-data-gateway
ports: ["3001:3001"]
depends_on: [redis, questdb]
risk-guardian:
build: ./apps/core-services/risk-guardian
ports: ["3002:3002"]
depends_on: [postgres, redis]
strategy-orchestrator:
build: ./apps/intelligence-services/strategy-orchestrator
ports: ["4001:4001"]
depends_on: [mongodb, postgres, redis]
trading-dashboard:
build: ./apps/interface-services/trading-dashboard
ports: ["4200:4200"]
```
### **Production Environment**
```yaml
# kubernetes deployment example
apiVersion: apps/v1
kind: Deployment
metadata:
name: market-data-gateway
spec:
replicas: 3
selector:
matchLabels:
app: market-data-gateway
template:
metadata:
labels:
app: market-data-gateway
spec:
containers:
- name: market-data-gateway
image: stockbot/market-data-gateway:latest
ports:
- containerPort: 3001
resources:
requests:
memory: "256Mi"
cpu: "250m"
limits:
memory: "512Mi"
cpu: "500m"
```
---
## 🔒 Security Architecture
### **Authentication & Authorization**
```typescript
// JWT-based authentication
interface AuthToken {
userId: string;
roles: string[];
permissions: string[];
expiresAt: Date;
}
// Role-based access control
enum UserRole {
TRADER = 'TRADER',
ADMIN = 'ADMIN',
VIEWER = 'VIEWER'
}
enum Permission {
READ_MARKET_DATA = 'READ_MARKET_DATA',
EXECUTE_TRADES = 'EXECUTE_TRADES',
MANAGE_STRATEGIES = 'MANAGE_STRATEGIES',
CONFIGURE_RISK = 'CONFIGURE_RISK'
}
```
### **API Security**
```typescript
// Rate limiting configuration
interface RateLimit {
windowMs: number; // 15 minutes
maxRequests: number; // 100 requests per window
skipIf: (req: Request) => boolean;
}
// Input validation
interface ApiValidation {
schema: JSONSchema;
sanitization: SanitizationRules;
authentication: AuthenticationMiddleware;
}
```
### **Data Security**
- **Encryption at Rest**: AES-256 for sensitive data
- **Encryption in Transit**: TLS 1.3 for all communications
- **Secrets Management**: Kubernetes secrets or HashiCorp Vault
- **Network Security**: VPC, security groups, firewalls
---
## 📊 Monitoring & Observability
### **Metrics Collection**
```typescript
interface ServiceMetrics {
// Performance metrics
requestLatency: Histogram;
requestRate: Counter;
errorRate: Counter;
// Business metrics
tradesExecuted: Counter;
strategiesActive: Gauge;
portfolioValue: Gauge;
// System metrics
memoryUsage: Gauge;
cpuUsage: Gauge;
dbConnections: Gauge;
}
```
### **Health Checks**
```typescript
interface HealthCheck {
service: string;
status: 'healthy' | 'degraded' | 'unhealthy';
checks: {
database: boolean;
redis: boolean;
externalApis: boolean;
webSocket: boolean;
};
uptime: number;
version: string;
}
```
### **Alerting Rules**
```yaml
# Prometheus alerting rules
groups:
- name: stockbot
rules:
- alert: HighErrorRate
expr: rate(http_requests_total{status=~"5.."}[5m]) > 0.1
for: 2m
- alert: HighLatency
expr: http_request_duration_seconds{quantile="0.95"} > 1
for: 2m
- alert: ServiceDown
expr: up{job="stockbot"} == 0
for: 30s
```
---
## 📋 Migration Plan
### **Phase 1: Current → Enhanced (1-2 months)**
1. **Week 1-2**: Implement API Gateway and authentication
2. **Week 3-4**: Add comprehensive logging and monitoring
3. **Week 5-6**: Enhance error handling and resilience
4. **Week 7-8**: Performance optimization and testing
### **Phase 2: Enhanced → ML-Ready (2-3 months)**
1. **Month 1**: Implement ML pipeline infrastructure
2. **Month 2**: Develop feature engineering services
3. **Month 3**: Integrate ML predictions into strategies
### **Phase 3: ML-Ready → Multi-Asset (3-4 months)**
1. **Month 1**: Abstract market data interfaces
2. **Month 2**: Implement crypto trading support
3. **Month 3**: Add forex and options trading
4. **Month 4**: Performance optimization and testing
---
## 🎯 Success Metrics
### **Technical KPIs**
- **Latency**: < 100ms for market data updates
- **Throughput**: > 10,000 requests/second
- **Availability**: 99.9% uptime
- **Error Rate**: < 0.1% of requests
### **Business KPIs**
- **Strategy Performance**: Sharpe ratio > 1.5
- **Risk Management**: Max drawdown < 5%
- **Execution Quality**: Slippage < 0.01%
- **System Adoption**: > 90% user satisfaction
---
This architecture document serves as a living blueprint for the Stock Bot Trading System, providing clear guidance for current development and future scaling decisions.

View file

@@ -1,229 +0,0 @@
# 📋 Stock Bot Trading System - Architecture Context
## 🏗️ System Overview
A comprehensive, microservice-based trading bot system built with **Bun**, **TypeScript**, and **Turborepo**. The system features a service-oriented architecture designed for real-time market data processing, strategy execution, and risk management.
## 🎯 Current System Status
### ✅ **Operational Services**
- **Market Data Gateway** (`apps/core-services/market-data-gateway`) - Port 3004
- **UNIFIED IMPLEMENTATION** - Merged from duplicate services
- Real-time market data ingestion and processing
- WebSocket server for live data streaming (Port 3005)
- REST API for market data queries and configuration
- Mock data implementation for testing
- Full TypeScript implementation with resolved compilation errors
- **Trading Dashboard** (`apps/interface-services/trading-dashboard`) - Port 5173
- React + TypeScript frontend with Tremor UI
- Real-time data visualization
- WebSocket client for live updates
- Professional financial dashboard components
### 🚧 **Ready for Implementation**
- **Strategy Orchestrator** (`apps/intelligence-services/strategy-orchestrator`) - Port 4001
- Package structure created, implementation needed
- Strategy execution and management
- Signal generation coordination
- **Risk Guardian** (`apps/core-services/risk-guardian`) - Port 3002
- Package structure created, implementation needed
- Real-time risk monitoring and alerts
- Position and exposure limits
## 🏗️ Service Architecture
### **Service Categories**
```
apps/
├── core-services/ # Essential trading infrastructure
│ ├── market-data-gateway/ ✅ Operational (UNIFIED)
│ └── risk-guardian/ 📋 Ready to implement
├── intelligence-services/ # Strategy and signal generation
│ └── strategy-orchestrator/ 📋 Ready to implement
├── interface-services/ # User interfaces and APIs
│ ├── trading-dashboard/ ✅ Operational
│ └── trading-dashboard-react/ 📋 Alternative implementation
├── data-services/ # Data processing and analytics
├── execution-services/ # Order management and execution
├── integration-services/ # External system integrations
└── platform-services/ # Infrastructure and monitoring
```
### **Shared Packages**
```
packages/
├── types/ # TypeScript type definitions
├── config/ # Configuration management
├── database/ # Database utilities (planned)
└── trading-core/ # Core trading logic (planned)
```
## 🔄 Communication Architecture
### **Event-Driven Core (Dragonfly)**
- **Primary Event Bus**: Dragonfly Redis-compatible streams
- **Event Types**: Market data, trading signals, risk alerts, portfolio updates
- **Communication Pattern**: Publish/Subscribe for loose coupling
- **Real-time Processing**: Sub-millisecond event propagation
### **Data Flow Patterns**
1. **Market Data Flow**
```
External APIs → Market Data Gateway → Dragonfly Events → Dashboard/Services
```
2. **Trading Signal Flow**
```
Market Data → Strategy Orchestrator → Signals → Risk Guardian → Execution
```
3. **Real-time Updates**
```
Services → WebSocket Server → Dashboard (Live UI Updates)
```
### **API Communication**
- **REST APIs**: Service-to-service and client-to-service communication
- **WebSocket**: Real-time bidirectional communication
- **HTTP Health Checks**: Service monitoring and discovery
## 🗄️ Data Infrastructure
### **Storage Systems**
- **🐉 Dragonfly** (Port 6379): Redis-compatible event streaming and caching
- **🐘 PostgreSQL** (Port 5432): Operational data with trading schemas
- **📊 QuestDB** (Ports 9000/8812): Time-series market data and analytics
- **🍃 MongoDB** (Port 27017): Document storage for sentiment analysis and raw documents
### **Database Schemas**
- `trading.*` - Orders, positions, executions, accounts
- `strategy.*` - Strategies, signals, performance metrics
- `risk.*` - Risk limits, events, monitoring
- `audit.*` - System events, health checks, configuration
### **MongoDB Collections**
- `sentiment_analysis` - Market sentiment scores and analysis
- `raw_documents` - News articles, social media posts, research reports
- `market_events` - Significant market events and their impact
### **Admin Interfaces**
- **Redis Insight** (Port 8001): Dragonfly management
- **PgAdmin** (Port 8080): PostgreSQL administration
- **QuestDB Console** (Port 9000): Time-series data management
- **Mongo Express** (Port 8081): MongoDB document browser and editor
## 🛡️ Infrastructure & DevOps
### **Container Management**
- **Docker Compose**: Multi-service orchestration
- **Development Environment**: `npm run dev:full`
- **Infrastructure Management**: PowerShell scripts in `scripts/docker.ps1`
### **Monitoring Stack**
- **Prometheus** (Port 9090): Metrics collection
- **Grafana** (Port 3000): Dashboards and alerting
- **Health Checks**: Each service exposes `/health` endpoints
### **Development Workflow**
```bash
# Start infrastructure
npm run infra:up
# Start admin tools
npm run docker:admin
# Start development services
npm run dev:full
```
## 🌐 Port Allocation
| Service Category | Port Range | Current Services |
|-----------------|------------|------------------|
| **Core Services** | 3001-3099 | Market Data Gateway (3001), Risk Guardian (3002) |
| **Intelligence** | 4001-4099 | Strategy Orchestrator (4001) |
| **Data Services** | 5001-5099 | (Future expansion) |
| **Interface** | 5173, 8001+ | Trading Dashboard (5173) |
| **Infrastructure** | 6379, 5432, 9000+, 27017 | Dragonfly, PostgreSQL, QuestDB, MongoDB |
## 🎯 Implementation Roadmap
### **Phase 1 - Foundation** ✅ Complete
- [x] Monorepo setup with Turborepo
- [x] Market Data Gateway with real-time streaming
- [x] Professional React dashboard with Tremor UI
- [x] Docker infrastructure with Dragonfly/PostgreSQL/QuestDB
- [x] WebSocket real-time communication
### **Phase 2 - Trading Logic** 📋 Next Priority
- [ ] Strategy Orchestrator implementation
- [ ] Risk Guardian implementation
- [ ] Event-driven strategy execution
- [ ] Portfolio position tracking
### **Phase 3 - Advanced Features** ⏳ Future
- [ ] Execution Engine with broker integration
- [ ] Advanced analytics and backtesting
- [ ] Machine learning signal generation
- [ ] Multi-broker support
## 🔧 Technology Stack
### **Backend Services**
- **Runtime**: Bun (fast JavaScript runtime)
- **Framework**: Hono (lightweight web framework)
- **Language**: TypeScript (type safety)
- **Events**: Dragonfly Redis Streams
- **WebSocket**: Native WebSocket implementation
### **Frontend**
- **Framework**: React with TypeScript
- **Build Tool**: Vite (fast development)
- **UI Library**: Tremor UI (financial components)
- **Styling**: Modern CSS with responsive design
### **Infrastructure**
- **Monorepo**: Turborepo for build orchestration
- **Containers**: Docker & Docker Compose
- **Databases**: PostgreSQL, QuestDB, Dragonfly
- **Monitoring**: Prometheus + Grafana
## 🔒 Security & Configuration
### **Environment Management**
- Environment-specific configurations
- Secure credential management
- Development vs production separation
### **Service Security**
- Inter-service authentication
- API rate limiting
- Database connection security
- External API key management
## 🚀 Getting Started
1. **Prerequisites**: Docker Desktop, Bun runtime
2. **Infrastructure**: `npm run infra:up`
3. **Admin Tools**: `npm run docker:admin`
4. **Development**: `npm run dev:full`
5. **Access**: Dashboard at http://localhost:5173
## 📁 Key Configuration Files
- `turbo.json` - Monorepo build configuration
- `docker-compose.yml` - Infrastructure orchestration
- `packages/config/` - Shared configuration management
- `database/postgres/init/` - Database schema definitions
---
**Architecture Design Principles:**
- **Microservices**: Independent, scalable services
- **Event-Driven**: Loose coupling via Dragonfly events
- **Type Safety**: TypeScript across all services
- **Real-time**: WebSocket and event streaming
- **Observability**: Comprehensive monitoring and logging
- **Developer Experience**: Fast development with hot reload

180
README.md
View file

@@ -1,180 +0,0 @@
# 🤖 Stock Bot Trading System
A comprehensive trading bot built with Bun and Turborepo, featuring a service-oriented architecture for real-time market data processing and strategy execution.
## 🚀 Quick Start
### Prerequisites
- [Bun](https://bun.sh/) runtime
- Node.js 18+ (for compatibility)
### Installation
```bash
# Clone and install dependencies
git clone <your-repo-url>
cd stock-bot
bun install
```
### Running the System
#### Option 1: VS Code Tasks (Recommended)
1. Open the project in VS Code
2. Press `Ctrl+Shift+P` (or `Cmd+Shift+P` on Mac)
3. Type "Tasks: Run Task" and select it
4. Choose "Start All Services"
#### Option 2: Manual Startup
```bash
# Terminal 1: Start Market Data Gateway
cd apps/core-services/market-data-gateway
bun run dev
# Terminal 2: Start Trading Dashboard
cd apps/interface-services/trading-dashboard
bun run dev
```
### Access Points
- **Trading Dashboard**: http://localhost:5173
- **Market Data API**: http://localhost:3001
- **Health Check**: http://localhost:3001/health
## 📊 Dashboard Features
### Real-time Market Data
- Live price feeds for AAPL, GOOGL, MSFT, TSLA, AMZN
- WebSocket connections for real-time updates
- Service health monitoring
### Professional UI Components
- Built with Tremor UI for financial visualizations
- Interactive charts and metrics
- Responsive design for all devices
### Dashboard Tabs
1. **Market Data**: Live prices, volume, bid/ask spreads
2. **Portfolio**: Holdings allocation and performance
3. **Charts**: Price and volume analysis
4. **Performance**: Trading metrics and statistics
## 🏗️ Architecture
### Service-Oriented Design
```
apps/
├── core-services/
│ └── market-data-gateway/ # Market data ingestion
├── interface-services/
│ └── trading-dashboard/ # React dashboard
├── data-services/ # (Future) Data processing
├── execution-services/ # (Future) Order management
├── intelligence-services/ # (Future) Strategy engine
├── platform-services/ # (Future) Infrastructure
└── integration-services/ # (Future) External APIs
```
### Shared Packages
```
packages/
├── types/ # TypeScript definitions
├── config/ # Configuration management
├── database/ # (Future) Database utilities
└── trading-core/ # (Future) Core trading logic
```
## 🔧 Development
### Project Structure
- **Turborepo**: Monorepo management
- **Bun**: Package manager and runtime
- **TypeScript**: Type safety across all services
- **React + Vite**: Modern frontend development
- **Tremor UI**: Financial dashboard components
### Key Technologies
- **Backend**: Hono framework, WebSockets, Redis
- **Frontend**: React, TypeScript, Tremor UI
- **Data**: QuestDB (planned), PostgreSQL (planned)
- **Deployment**: Docker, Kubernetes (planned)
## 📈 Current Status
### ✅ Completed
- [x] Monorepo setup with Turborepo
- [x] Market Data Gateway service
- [x] Real-time WebSocket connections
- [x] Professional React dashboard
- [x] Tremor UI integration
- [x] TypeScript type system
- [x] Service health monitoring
### 🚧 In Progress
- [ ] Strategy execution engine
- [ ] Risk management system
- [ ] Portfolio tracking
- [ ] Real broker integration
### 🔮 Planned
- [ ] Advanced charting
- [ ] Backtesting framework
- [ ] Machine learning signals
- [ ] Multi-broker support
- [ ] Mobile application
## 🛠️ API Endpoints
### Market Data Gateway (Port 3001)
```
GET /health # Service health check
GET /api/market-data/:symbol # Current market data
GET /api/ohlcv/:symbol # Historical OHLCV data
WS ws://localhost:3001 # Real-time data stream
```
### Data Format
```typescript
interface MarketData {
symbol: string;
price: number;
bid: number;
ask: number;
volume: number;
timestamp: string;
}
```
## 🔧 Configuration
Environment variables are managed in `.env`:
```bash
# Database Configuration
DATABASE_URL=postgresql://...
QUESTDB_URL=http://localhost:9000
# External APIs
ALPHA_VANTAGE_API_KEY=your_key_here
ALPACA_API_KEY=your_key_here
# Service Configuration
NODE_ENV=development
LOG_LEVEL=info
```
## 🤝 Contributing
1. Fork the repository
2. Create a feature branch: `git checkout -b feature/amazing-feature`
3. Commit changes: `git commit -m 'Add amazing feature'`
4. Push to branch: `git push origin feature/amazing-feature`
5. Open a Pull Request
## 📝 License
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
## 🙏 Acknowledgments
- [Tremor UI](https://tremor.so/) for beautiful financial components
- [Bun](https://bun.sh/) for fast runtime and package management
- [Turborepo](https://turbo.build/) for monorepo tooling

View file

@@ -1,62 +0,0 @@
# Stock Bot Project Structure Refactoring
This document outlines the changes made to improve separation of concerns in the stock-bot project architecture.
## Directory Structure Changes
1. Created a dedicated `libs/` directory (replacing the previous `packages/` approach)
2. Split monolithic type definitions into domain-specific modules
3. Created specialized libraries for cross-cutting concerns
## New Libraries
### 1. `@stock-bot/types`
Domain-specific type definitions organized by functional area:
- `market/` - Market data structures (OHLCV, OrderBook, etc.)
- `trading/` - Trading types (Orders, Positions, etc.)
- `strategy/` - Strategy and signal types
- `events/` - Event definitions for the event bus
- `api/` - Common API request/response types
- `config/` - Configuration type definitions
### 2. `@stock-bot/event-bus`
A Redis-based event bus implementation for inter-service communication:
- Publish/subscribe pattern for asynchronous messaging
- Support for typed events
- Reliable message delivery
### 3. `@stock-bot/utils`
Common utility functions shared across services:
- `dateUtils` - Date/time helpers for market data
- `financialUtils` - Financial calculations (Sharpe ratio, drawdown)
- `logger` - Standardized logging service
### 4. `@stock-bot/api-client`
Type-safe API clients for inter-service communication:
- `BaseApiClient` - Common HTTP client functionality
- Service-specific clients (BacktestClient, StrategyClient)
## Benefits of the New Architecture
1. **Better Separation of Concerns**
- Each library has a clear, focused purpose
- Domain types are logically grouped
2. **Improved Maintainability**
- Smaller, focused modules
- Clear dependencies between modules
3. **Type Safety**
- Consistent types across the entire system
- Better IDE autocompletion and error checking
4. **Reduced Duplication**
- Shared utilities in a central location
- Consistent implementation of common patterns
## Next Steps
1. Update service implementations to use the new libraries
2. Migrate from the old packages directory to the new libs structure
3. Implement domain-specific validations in each type module
4. Add unit tests for each library

View file

@ -1,196 +0,0 @@
# Market Data Gateway - Unified Implementation
## Overview
The Market Data Gateway is a unified service that consolidates real-time market data ingestion, processing, and distribution capabilities. This service has been created by merging the previous core-services and data-services market-data-gateway implementations into a single, comprehensive solution.
## Architecture
### Unified Design
- **Single Service**: Combines data ingestion, processing, and distribution in one service
- **HTTP API**: RESTful endpoints for configuration and data retrieval
- **WebSocket Server**: Real-time data streaming capabilities
- **Type Safety**: Full TypeScript implementation with comprehensive type definitions
### Key Components
- **Data Source Management**: Configure and manage multiple market data sources
- **Real-time Processing**: Stream processing pipelines for market data
- **WebSocket Streaming**: Real-time data distribution to clients
- **Health Monitoring**: Comprehensive health checks and metrics
- **Cache Management**: Redis-based caching for performance optimization
## Features
### HTTP Endpoints
#### Health & Status
- `GET /health` - Basic health check
- `GET /health/readiness` - Readiness probe
- `GET /health/liveness` - Liveness probe
- `GET /api/v1/gateway/status` - Gateway status and metrics
- `GET /api/v1/gateway/config` - Current configuration
#### Data Sources
- `GET /api/v1/sources` - List configured data sources
- `POST /api/v1/sources` - Add new data source
- `PUT /api/v1/sources/:sourceId` - Update data source
- `DELETE /api/v1/sources/:sourceId` - Remove data source
#### Market Data
- `GET /api/v1/data/tick/:symbol` - Latest tick data for symbol
- `GET /api/v1/data/candles/:symbol` - Historical candle data
- `GET /api/v1/subscriptions` - List active subscriptions
- `POST /api/v1/subscriptions` - Create new subscription
#### Metrics
- `GET /api/v1/metrics` - System and gateway metrics
### WebSocket Streaming
Connect to `ws://localhost:3005/ws` for real-time data streaming.
#### Message Types
**Subscribe to symbols:**
```json
{
"type": "subscribe",
"symbols": ["AAPL", "GOOGL", "MSFT"]
}
```
**Unsubscribe:**
```json
{
"type": "unsubscribe",
"subscriptionId": "sub_1234567890"
}
```
**Receive tick data:**
```json
{
"type": "tick",
"data": {
"symbol": "AAPL",
"price": 150.25,
"volume": 1000,
"timestamp": "2025-06-03T13:01:49.638Z",
"bid": 150.20,
"ask": 150.30
}
}
```
## Configuration
The service is configured through environment variables and the `GatewayConfig` interface:
### Environment Variables
- `PORT` - HTTP server port (default: 3004)
- `HOST` - Server host (default: 0.0.0.0)
- `REDIS_HOST` - Redis host (default: localhost)
- `REDIS_PORT` - Redis port (default: 6379)
- `REDIS_PASSWORD` - Redis password (optional)
- `REDIS_DB` - Redis database number (default: 0)
- `METRICS_PORT` - Metrics port (default: 9090)
### Configuration Structure
```typescript
interface GatewayConfig {
server: ServerConfig;
dataSources: DataSourceConfig[];
processing: ProcessingConfig;
cache: CacheConfig;
monitoring: MonitoringConfig;
}
```
## Development
### Prerequisites
- Bun runtime
- Redis server
- TypeScript
### Setup
```bash
cd apps/core-services/market-data-gateway
bun install
```
### Development Mode
```bash
bun run dev
```
### Build
```bash
bun run build
```
### Testing
The service includes mock data for testing purposes. When running, it will:
- Respond to health checks
- Provide mock tick and candle data
- Accept WebSocket connections
- Send simulated real-time data every 5 seconds
## Deployment
The service can be deployed using:
- Docker containers
- Kubernetes
- Direct Node.js/Bun deployment
### Docker
```dockerfile
FROM oven/bun:latest
WORKDIR /app
COPY package.json .
COPY src/ ./src/
RUN bun install
RUN bun run build
EXPOSE 3004 3005
CMD ["bun", "run", "start"]
```
## Migration Notes
This unified implementation replaces both:
- `apps/core-services/market-data-gateway` (original)
- `apps/data-services/market-data-gateway` (duplicate)
### Changes Made
1. **Consolidated Architecture**: Merged real-time and storage capabilities
2. **Fixed Type Issues**: Resolved all TypeScript compilation errors
3. **Simplified Configuration**: Aligned with `GatewayConfig` interface
4. **Working WebSocket**: Functional real-time streaming
5. **Comprehensive API**: Full REST API implementation
6. **Mock Data**: Testing capabilities with simulated data
### Removed Duplicates
- Removed `apps/data-services/market-data-gateway` directory
- Consolidated type definitions
- Unified configuration structure
## Future Enhancements
1. **Real Data Sources**: Replace mock data with actual market data feeds
2. **Advanced Processing**: Implement complex processing pipelines
3. **Persistence Layer**: Add database storage for historical data
4. **Authentication**: Add API authentication and authorization
5. **Rate Limiting**: Implement request rate limiting
6. **Monitoring**: Enhanced metrics and alerting
7. **Load Balancing**: Support for horizontal scaling
## Status
- **COMPLETED**: TypeScript compilation errors resolved
- **COMPLETED**: Unified service architecture
- **COMPLETED**: Working HTTP and WebSocket servers
- **COMPLETED**: Mock data implementation
- **COMPLETED**: Health and metrics endpoints
- **COMPLETED**: Duplicate service removal
The market data gateway merge is now complete and the service is fully operational.

View file

@ -1,58 +0,0 @@
{
"name": "@stock-bot/market-data-gateway",
"version": "1.0.0",
"description": "Unified market data gateway - real-time processing and historical storage",
"main": "src/index.ts",
"type": "module",
"scripts": {
"dev": "bun --watch src/index.ts",
"build": "tsc",
"start": "bun src/index.ts",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit"
},
"dependencies": {
"hono": "^4.6.3",
"@hono/node-server": "^1.8.0",
"ws": "^8.18.0",
"bull": "^4.12.0",
"ioredis": "^5.4.1",
"zod": "^3.22.0",
"uuid": "^9.0.0",
"compression": "^1.7.4",
"helmet": "^7.1.0",
"rate-limiter-flexible": "^5.0.0",
"node-cron": "^3.0.3",
"eventemitter3": "^5.0.1",
"fast-json-stringify": "^5.10.0",
"pino": "^8.17.0",
"dotenv": "^16.3.0",
"@stock-bot/http-client": "*",
"@stock-bot/config": "*",
"@stock-bot/types": "*",
"@stock-bot/event-bus": "*",
"@stock-bot/utils": "*"
},
"devDependencies": {
"@types/node": "^20.11.0",
"@types/ws": "^8.5.12",
"@types/uuid": "^9.0.0",
"@types/compression": "^1.7.5",
"@types/node-cron": "^3.0.11",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"keywords": [
"market-data",
"gateway",
"real-time",
"websocket",
"historical",
"stock-bot",
"core-services"
]
}

View file

@ -1,114 +0,0 @@
// Data Provider Configuration
//
// Describes an external market-data provider: how to reach it, how to
// authenticate, how hard we may hit it, and how to retry on failure.
export interface DataProviderConfig {
  name: string;
  type: 'rest' | 'websocket' | 'both';
  enabled: boolean;
  endpoints: {
    quotes?: string;
    candles?: string;
    trades?: string;
    websocket?: string;
  };
  authentication?: {
    type: 'api_key' | 'bearer' | 'basic';
    key?: string;
    secret?: string;
    token?: string;
  };
  rateLimits: {
    requestsPerSecond: number;
    requestsPerMinute: number;
    requestsPerHour: number;
  };
  retryPolicy: {
    maxRetries: number;
    backoffMultiplier: number;
    initialDelayMs: number;
  };
  timeout: number;
  priority: number; // 1-10, higher is better
}

// Static registry of known providers. API keys come from the environment;
// a missing env var simply leaves `key` undefined.
export const dataProviderConfigs: Record<string, DataProviderConfig> = {
  'alpha-vantage': {
    name: 'Alpha Vantage',
    type: 'rest',
    enabled: true,
    endpoints: {
      quotes: 'https://www.alphavantage.co/query',
      candles: 'https://www.alphavantage.co/query',
    },
    authentication: {
      type: 'api_key',
      key: process.env.ALPHA_VANTAGE_API_KEY,
    },
    rateLimits: {
      requestsPerSecond: 5,
      requestsPerMinute: 500,
      requestsPerHour: 25000,
    },
    retryPolicy: {
      maxRetries: 3,
      backoffMultiplier: 2,
      initialDelayMs: 1000,
    },
    timeout: 10000,
    priority: 7,
  },
  'yahoo-finance': {
    name: 'Yahoo Finance',
    type: 'rest',
    enabled: true,
    endpoints: {
      quotes: 'https://query1.finance.yahoo.com/v8/finance/chart',
      candles: 'https://query1.finance.yahoo.com/v8/finance/chart',
    },
    rateLimits: {
      requestsPerSecond: 10,
      requestsPerMinute: 2000,
      requestsPerHour: 100000,
    },
    retryPolicy: {
      maxRetries: 3,
      backoffMultiplier: 1.5,
      initialDelayMs: 500,
    },
    timeout: 8000,
    priority: 8,
  },
  'polygon': {
    name: 'Polygon.io',
    type: 'both',
    enabled: false,
    endpoints: {
      quotes: 'https://api.polygon.io/v2/last/nbbo',
      candles: 'https://api.polygon.io/v2/aggs/ticker',
      trades: 'https://api.polygon.io/v3/trades',
      websocket: 'wss://socket.polygon.io/stocks',
    },
    authentication: {
      type: 'api_key',
      key: process.env.POLYGON_API_KEY,
    },
    rateLimits: {
      requestsPerSecond: 100,
      requestsPerMinute: 5000,
      requestsPerHour: 100000,
    },
    retryPolicy: {
      maxRetries: 5,
      backoffMultiplier: 2,
      initialDelayMs: 200,
    },
    timeout: 5000,
    priority: 9,
  },
};

/** Providers currently switched on via their `enabled` flag. */
export function getEnabledProviders(): DataProviderConfig[] {
  const active: DataProviderConfig[] = [];
  for (const provider of Object.values(dataProviderConfigs)) {
    if (provider.enabled) {
      active.push(provider);
    }
  }
  return active;
}

/** Enabled providers ordered from highest to lowest priority. */
export function getProviderByPriority(): DataProviderConfig[] {
  const ranked = getEnabledProviders();
  ranked.sort((first, second) => second.priority - first.priority);
  return ranked;
}

View file

@ -1,449 +0,0 @@
import { Context } from 'hono';
import { MarketDataGatewayService } from '../services/MarketDataGatewayService';
import {
DataSourceConfig,
SubscriptionRequest,
ProcessingPipelineConfig,
Logger
} from '../types/MarketDataGateway';
export class GatewayController {
  /**
   * HTTP controller for the market data gateway.
   *
   * Exposes gateway status/configuration, data source management,
   * subscription management, processing pipeline management, market data
   * queries, and system operations. Every handler delegates to
   * MarketDataGatewayService and translates failures into a logged error
   * plus a JSON error response (500 unless noted otherwise).
   */
  constructor(
    private gatewayService: MarketDataGatewayService,
    private logger: Logger
  ) {}

  /**
   * Shared failure path: log `message` (with a trailing colon, matching the
   * original per-handler log lines) together with the underlying error, and
   * answer 500 with the same message as the JSON `error` field.
   */
  private fail(c: Context, message: string, error: unknown) {
    this.logger.error(`${message}:`, error);
    return c.json({ error: message }, 500);
  }

  // Gateway status and control

  /** Current gateway status snapshot. */
  async getStatus(c: Context) {
    try {
      return c.json(this.gatewayService.getStatus());
    } catch (error) {
      return this.fail(c, 'Failed to get gateway status', error);
    }
  }

  /** Health summary; only an 'unhealthy' status maps to HTTP 503. */
  async getHealth(c: Context) {
    try {
      const health = this.gatewayService.getHealth();
      const statusCode = health.status === 'healthy' ? 200 :
                        health.status === 'degraded' ? 200 : 503;
      return c.json(health, statusCode);
    } catch (error) {
      // Custom failure shape: callers expect a health-like body, not {error}.
      this.logger.error('Failed to get gateway health:', error);
      return c.json({
        status: 'unhealthy',
        message: 'Health check failed',
        timestamp: new Date().toISOString()
      }, 503);
    }
  }

  /** Gateway-level metrics. */
  async getMetrics(c: Context) {
    try {
      return c.json(this.gatewayService.getMetrics());
    } catch (error) {
      return this.fail(c, 'Failed to get gateway metrics', error);
    }
  }

  /** Current gateway configuration. */
  async getConfiguration(c: Context) {
    try {
      return c.json(this.gatewayService.getConfiguration());
    } catch (error) {
      return this.fail(c, 'Failed to get gateway configuration', error);
    }
  }

  /** Apply partial configuration updates from the request body. */
  async updateConfiguration(c: Context) {
    try {
      const updates = await c.req.json();
      await this.gatewayService.updateConfiguration(updates);
      return c.json({
        message: 'Configuration updated successfully',
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to update gateway configuration', error);
    }
  }

  // Data source management

  /** List all configured data sources. */
  async getDataSources(c: Context) {
    try {
      const sources = this.gatewayService.getDataSources();
      return c.json({ dataSources: Array.from(sources.values()) });
    } catch (error) {
      return this.fail(c, 'Failed to get data sources', error);
    }
  }

  /** Fetch one data source by id; 404 when unknown. */
  async getDataSource(c: Context) {
    try {
      const sourceId = c.req.param('sourceId');
      const source = this.gatewayService.getDataSource(sourceId);
      if (!source) {
        return c.json({ error: 'Data source not found' }, 404);
      }
      return c.json(source);
    } catch (error) {
      return this.fail(c, 'Failed to get data source', error);
    }
  }

  /** Register a new data source; 400 on missing required fields. */
  async addDataSource(c: Context) {
    try {
      const sourceConfig: DataSourceConfig = await c.req.json();
      if (!sourceConfig.id || !sourceConfig.type || !sourceConfig.provider) {
        return c.json({
          error: 'Missing required fields: id, type, provider'
        }, 400);
      }
      await this.gatewayService.addDataSource(sourceConfig);
      return c.json({
        message: 'Data source added successfully',
        sourceId: sourceConfig.id,
        timestamp: new Date().toISOString()
      }, 201);
    } catch (error) {
      return this.fail(c, 'Failed to add data source', error);
    }
  }

  /** Apply partial updates to an existing data source. */
  async updateDataSource(c: Context) {
    try {
      const sourceId = c.req.param('sourceId');
      const updates = await c.req.json();
      await this.gatewayService.updateDataSource(sourceId, updates);
      return c.json({
        message: 'Data source updated successfully',
        sourceId,
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to update data source', error);
    }
  }

  /** Delete a data source. */
  async removeDataSource(c: Context) {
    try {
      const sourceId = c.req.param('sourceId');
      await this.gatewayService.removeDataSource(sourceId);
      return c.json({
        message: 'Data source removed successfully',
        sourceId,
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to remove data source', error);
    }
  }

  /** Begin ingesting from a data source. */
  async startDataSource(c: Context) {
    try {
      const sourceId = c.req.param('sourceId');
      await this.gatewayService.startDataSource(sourceId);
      return c.json({
        message: 'Data source started successfully',
        sourceId,
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to start data source', error);
    }
  }

  /** Stop ingesting from a data source. */
  async stopDataSource(c: Context) {
    try {
      const sourceId = c.req.param('sourceId');
      await this.gatewayService.stopDataSource(sourceId);
      return c.json({
        message: 'Data source stopped successfully',
        sourceId,
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to stop data source', error);
    }
  }

  // Subscription management

  /** List all active subscriptions. */
  async getSubscriptions(c: Context) {
    try {
      const subscriptions = this.gatewayService.getSubscriptions();
      return c.json({ subscriptions: Array.from(subscriptions.values()) });
    } catch (error) {
      return this.fail(c, 'Failed to get subscriptions', error);
    }
  }

  /** Fetch one subscription by id; 404 when unknown. */
  async getSubscription(c: Context) {
    try {
      const subscriptionId = c.req.param('subscriptionId');
      const subscription = this.gatewayService.getSubscription(subscriptionId);
      if (!subscription) {
        return c.json({ error: 'Subscription not found' }, 404);
      }
      return c.json(subscription);
    } catch (error) {
      return this.fail(c, 'Failed to get subscription', error);
    }
  }

  /** Create a subscription; requires clientId and a non-empty symbols list. */
  async createSubscription(c: Context) {
    try {
      const subscriptionRequest: SubscriptionRequest = await c.req.json();
      if (!subscriptionRequest.clientId || !subscriptionRequest.symbols || subscriptionRequest.symbols.length === 0) {
        return c.json({
          error: 'Missing required fields: clientId, symbols'
        }, 400);
      }
      const subscriptionId = await this.gatewayService.subscribe(subscriptionRequest);
      return c.json({
        message: 'Subscription created successfully',
        subscriptionId,
        clientId: subscriptionRequest.clientId,
        symbols: subscriptionRequest.symbols,
        timestamp: new Date().toISOString()
      }, 201);
    } catch (error) {
      return this.fail(c, 'Failed to create subscription', error);
    }
  }

  /** Apply partial updates to a subscription. */
  async updateSubscription(c: Context) {
    try {
      const subscriptionId = c.req.param('subscriptionId');
      const updates = await c.req.json();
      await this.gatewayService.updateSubscription(subscriptionId, updates);
      return c.json({
        message: 'Subscription updated successfully',
        subscriptionId,
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to update subscription', error);
    }
  }

  /** Cancel a subscription. */
  async deleteSubscription(c: Context) {
    try {
      const subscriptionId = c.req.param('subscriptionId');
      await this.gatewayService.unsubscribe(subscriptionId);
      return c.json({
        message: 'Subscription deleted successfully',
        subscriptionId,
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to delete subscription', error);
    }
  }

  // Processing pipeline management

  /** List all processing pipelines. */
  async getProcessingPipelines(c: Context) {
    try {
      const pipelines = this.gatewayService.getProcessingPipelines();
      return c.json({ pipelines: Array.from(pipelines.values()) });
    } catch (error) {
      return this.fail(c, 'Failed to get processing pipelines', error);
    }
  }

  /** Fetch one processing pipeline by id; 404 when unknown. */
  async getProcessingPipeline(c: Context) {
    try {
      const pipelineId = c.req.param('pipelineId');
      const pipeline = this.gatewayService.getProcessingPipeline(pipelineId);
      if (!pipeline) {
        return c.json({ error: 'Processing pipeline not found' }, 404);
      }
      return c.json(pipeline);
    } catch (error) {
      return this.fail(c, 'Failed to get processing pipeline', error);
    }
  }

  /** Create a processing pipeline; 400 on missing required fields. */
  async createProcessingPipeline(c: Context) {
    try {
      const pipelineConfig: ProcessingPipelineConfig = await c.req.json();
      if (!pipelineConfig.id || !pipelineConfig.name || !pipelineConfig.processors) {
        return c.json({
          error: 'Missing required fields: id, name, processors'
        }, 400);
      }
      await this.gatewayService.addProcessingPipeline(pipelineConfig);
      return c.json({
        message: 'Processing pipeline created successfully',
        pipelineId: pipelineConfig.id,
        timestamp: new Date().toISOString()
      }, 201);
    } catch (error) {
      return this.fail(c, 'Failed to create processing pipeline', error);
    }
  }

  /** Apply partial updates to a processing pipeline. */
  async updateProcessingPipeline(c: Context) {
    try {
      const pipelineId = c.req.param('pipelineId');
      const updates = await c.req.json();
      await this.gatewayService.updateProcessingPipeline(pipelineId, updates);
      return c.json({
        message: 'Processing pipeline updated successfully',
        pipelineId,
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to update processing pipeline', error);
    }
  }

  /** Delete a processing pipeline. */
  async deleteProcessingPipeline(c: Context) {
    try {
      const pipelineId = c.req.param('pipelineId');
      await this.gatewayService.removeProcessingPipeline(pipelineId);
      return c.json({
        message: 'Processing pipeline deleted successfully',
        pipelineId,
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to delete processing pipeline', error);
    }
  }

  // Market data queries

  /** Latest tick for a symbol; 400 without a symbol, 404 without data. */
  async getLatestTick(c: Context) {
    try {
      const symbol = c.req.param('symbol');
      if (!symbol) {
        return c.json({ error: 'Symbol parameter is required' }, 400);
      }
      const tick = await this.gatewayService.getLatestTick(symbol);
      if (!tick) {
        return c.json({ error: 'No data available for symbol' }, 404);
      }
      return c.json(tick);
    } catch (error) {
      return this.fail(c, 'Failed to get latest tick', error);
    }
  }

  /**
   * Historical candles for a symbol. Query params: timeframe (default '1m'),
   * startTime/endTime (epoch numbers), limit (default 100). Numbers are
   * parsed as base-10; a malformed startTime/endTime yields NaN, which the
   * service layer is expected to reject.
   */
  async getCandles(c: Context) {
    try {
      const symbol = c.req.param('symbol');
      const timeframe = c.req.query('timeframe') || '1m';
      const startTime = c.req.query('startTime');
      const endTime = c.req.query('endTime');
      const limit = parseInt(c.req.query('limit') || '100', 10);
      if (!symbol) {
        return c.json({ error: 'Symbol parameter is required' }, 400);
      }
      const candles = await this.gatewayService.getCandles(
        symbol,
        timeframe,
        startTime ? parseInt(startTime, 10) : undefined,
        endTime ? parseInt(endTime, 10) : undefined,
        limit
      );
      return c.json({
        symbol,
        timeframe,
        candles,
        count: candles.length,
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to get candles', error);
    }
  }

  // System operations

  /** Flush the gateway cache. */
  async flushCache(c: Context) {
    try {
      await this.gatewayService.flushCache();
      return c.json({
        message: 'Cache flushed successfully',
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to flush cache', error);
    }
  }

  /** Trigger an asynchronous gateway restart. */
  async restart(c: Context) {
    try {
      await this.gatewayService.restart();
      return c.json({
        message: 'Gateway restart initiated',
        timestamp: new Date().toISOString()
      });
    } catch (error) {
      return this.fail(c, 'Failed to restart gateway', error);
    }
  }
}

View file

@ -1,146 +0,0 @@
import { Context } from 'hono';
import { MarketDataGatewayService } from '../services/MarketDataGatewayService';
import { Logger } from '../types/MarketDataGateway';
export class HealthController {
  /**
   * Kubernetes-style health endpoints for the gateway: overall health,
   * readiness (critical components healthy), liveness (process responsive),
   * per-component health, and a combined detailed report.
   */
  constructor(
    private gatewayService: MarketDataGatewayService,
    private logger: Logger
  ) {}

  /** Overall health report; 'healthy'/'degraded' answer 200, otherwise 503. */
  async getHealth(c: Context) {
    try {
      const report = this.gatewayService.getHealth();
      const code =
        report.status === 'healthy' || report.status === 'degraded' ? 200 : 503;
      return c.json(report, code);
    } catch (err) {
      this.logger.error('Health check failed:', err);
      return c.json({
        status: 'unhealthy',
        message: 'Health check failed',
        timestamp: new Date().toISOString(),
        error: err instanceof Error ? err.message : 'Unknown error'
      }, 503);
    }
  }

  /** Ready only when every critical component reports 'healthy'. */
  async getReadiness(c: Context) {
    try {
      const components = this.gatewayService.getHealth().details?.components || {};
      const criticalComponents = [
        'dataSourceManager',
        'processingEngine',
        'subscriptionManager'
      ];
      const componentStatuses: Record<string, string> = {};
      let allReady = true;
      for (const name of criticalComponents) {
        const entry = components[name];
        componentStatuses[name] = entry?.status || 'unknown';
        if (!entry || entry.status !== 'healthy') {
          allReady = false;
        }
      }
      return c.json({
        status: allReady ? 'ready' : 'not-ready',
        message: allReady ? 'All critical components are ready' : 'Some critical components are not ready',
        timestamp: new Date().toISOString(),
        components: componentStatuses
      }, allReady ? 200 : 503);
    } catch (err) {
      this.logger.error('Readiness check failed:', err);
      return c.json({
        status: 'not-ready',
        message: 'Readiness check failed',
        timestamp: new Date().toISOString(),
        error: err instanceof Error ? err.message : 'Unknown error'
      }, 503);
    }
  }

  /** Cheap liveness probe: report uptime and memory, no dependency checks. */
  async getLiveness(c: Context) {
    try {
      const toMb = (bytes: number) => Math.round(bytes / 1024 / 1024);
      const mem = process.memoryUsage();
      return c.json({
        status: 'alive',
        message: 'Service is alive and responding',
        timestamp: new Date().toISOString(),
        uptime: Math.floor(process.uptime()),
        memory: {
          rss: toMb(mem.rss), // MB
          heapUsed: toMb(mem.heapUsed), // MB
          heapTotal: toMb(mem.heapTotal), // MB
        }
      });
    } catch (err) {
      this.logger.error('Liveness check failed:', err);
      return c.json({
        status: 'dead',
        message: 'Liveness check failed',
        timestamp: new Date().toISOString(),
        error: err instanceof Error ? err.message : 'Unknown error'
      }, 503);
    }
  }

  /** Health of one named component; 404 lists the known component names. */
  async getComponentHealth(c: Context) {
    try {
      const componentName = c.req.param('component');
      const components = this.gatewayService.getHealth().details?.components || {};
      const entry = components[componentName];
      if (!entry) {
        return c.json({
          error: 'Component not found',
          availableComponents: Object.keys(components)
        }, 404);
      }
      return c.json({
        component: componentName,
        ...entry,
        timestamp: new Date().toISOString()
      });
    } catch (err) {
      this.logger.error('Component health check failed:', err);
      return c.json({
        error: 'Component health check failed',
        message: err instanceof Error ? err.message : 'Unknown error'
      }, 500);
    }
  }

  /** Combined health + metrics + status snapshot in one response. */
  async getDetailedHealth(c: Context) {
    try {
      return c.json({
        health: this.gatewayService.getHealth(),
        metrics: this.gatewayService.getMetrics(),
        status: this.gatewayService.getStatus(),
        timestamp: new Date().toISOString()
      });
    } catch (err) {
      this.logger.error('Detailed health check failed:', err);
      return c.json({
        error: 'Detailed health check failed',
        message: err instanceof Error ? err.message : 'Unknown error'
      }, 500);
    }
  }
}

View file

@ -1,330 +0,0 @@
import { Context } from 'hono';
import { MetricsCollector } from '../services/MetricsCollector';
import { Logger } from '../types/MarketDataGateway';
export class MetricsController {
constructor(
private metricsCollector: MetricsCollector,
private logger: Logger
) {}
async getMetrics(c: Context) {
try {
const format = c.req.query('format') || 'json';
if (format === 'prometheus') {
const prometheusMetrics = this.metricsCollector.exportMetrics('prometheus');
return c.text(prometheusMetrics, 200, {
'Content-Type': 'text/plain; charset=utf-8'
});
}
const metrics = this.metricsCollector.getAggregatedMetrics();
return c.json(metrics);
} catch (error) {
this.logger.error('Failed to get metrics:', error);
return c.json({ error: 'Failed to get metrics' }, 500);
}
}
async getMetric(c: Context) {
try {
const metricName = c.req.param('metricName');
const duration = c.req.query('duration');
if (!metricName) {
return c.json({ error: 'Metric name is required' }, 400);
}
const durationMs = duration ? parseInt(duration) * 1000 : undefined;
const metricData = this.metricsCollector.getMetric(metricName, durationMs);
return c.json({
metric: metricName,
duration: duration || 'all',
data: metricData,
count: metricData.length
});
} catch (error) {
this.logger.error('Failed to get metric:', error);
return c.json({ error: 'Failed to get metric' }, 500);
}
}
async getMetricAverage(c: Context) {
try {
const metricName = c.req.param('metricName');
const duration = c.req.query('duration');
if (!metricName) {
return c.json({ error: 'Metric name is required' }, 400);
}
const durationMs = duration ? parseInt(duration) * 1000 : undefined;
const average = this.metricsCollector.getAverageMetric(metricName, durationMs);
return c.json({
metric: metricName,
duration: duration || 'all',
average,
timestamp: new Date().toISOString()
});
} catch (error) {
this.logger.error('Failed to get metric average:', error);
return c.json({ error: 'Failed to get metric average' }, 500);
}
}
async getMetricLatest(c: Context) {
try {
const metricName = c.req.param('metricName');
if (!metricName) {
return c.json({ error: 'Metric name is required' }, 400);
}
const latest = this.metricsCollector.getLatestMetric(metricName);
if (latest === null) {
return c.json({ error: 'Metric not found or no data available' }, 404);
}
return c.json({
metric: metricName,
value: latest,
timestamp: new Date().toISOString()
});
} catch (error) {
this.logger.error('Failed to get latest metric:', error);
return c.json({ error: 'Failed to get latest metric' }, 500);
}
}
async getMetricRate(c: Context) {
try {
const metricName = c.req.param('metricName');
const duration = c.req.query('duration') || '60'; // Default 60 seconds
if (!metricName) {
return c.json({ error: 'Metric name is required' }, 400);
}
const durationMs = parseInt(duration) * 1000;
const rate = this.metricsCollector.getRate(metricName, durationMs);
return c.json({
metric: metricName,
duration: duration,
rate: rate,
unit: 'per second',
timestamp: new Date().toISOString()
});
} catch (error) {
this.logger.error('Failed to get metric rate:', error);
return c.json({ error: 'Failed to get metric rate' }, 500);
}
}
async getMetricPercentile(c: Context) {
try {
const metricName = c.req.param('metricName');
const percentile = c.req.query('percentile') || '95';
const duration = c.req.query('duration');
if (!metricName) {
return c.json({ error: 'Metric name is required' }, 400);
}
const percentileValue = parseFloat(percentile);
if (isNaN(percentileValue) || percentileValue < 0 || percentileValue > 100) {
return c.json({ error: 'Percentile must be a number between 0 and 100' }, 400);
}
const durationMs = duration ? parseInt(duration) * 1000 : undefined;
const value = this.metricsCollector.getPercentile(metricName, percentileValue, durationMs);
return c.json({
metric: metricName,
percentile: percentileValue,
value,
duration: duration || 'all',
timestamp: new Date().toISOString()
});
} catch (error) {
this.logger.error('Failed to get metric percentile:', error);
return c.json({ error: 'Failed to get metric percentile' }, 500);
}
}
async getAlerts(c: Context) {
try {
const activeOnly = c.req.query('active') === 'true';
if (activeOnly) {
const alerts = this.metricsCollector.getActiveAlerts();
return c.json({ alerts, count: alerts.length });
}
const rules = this.metricsCollector.getAlertRules();
const alerts = this.metricsCollector.getActiveAlerts();
return c.json({
alertRules: rules,
activeAlerts: alerts,
rulesCount: rules.length,
activeCount: alerts.length
});
} catch (error) {
this.logger.error('Failed to get alerts:', error);
return c.json({ error: 'Failed to get alerts' }, 500);
}
}
async addAlertRule(c: Context) {
try {
const rule = await c.req.json();
// Validate required fields
if (!rule.id || !rule.metric || !rule.condition || rule.threshold === undefined) {
return c.json({
error: 'Missing required fields: id, metric, condition, threshold'
}, 400);
}
// Validate condition
const validConditions = ['gt', 'lt', 'eq', 'gte', 'lte'];
if (!validConditions.includes(rule.condition)) {
return c.json({
error: `Invalid condition. Must be one of: ${validConditions.join(', ')}`
}, 400);
}
this.metricsCollector.addAlertRule(rule);
return c.json({
message: 'Alert rule added successfully',
ruleId: rule.id,
timestamp: new Date().toISOString()
}, 201);
} catch (error) {
this.logger.error('Failed to add alert rule:', error);
return c.json({ error: 'Failed to add alert rule' }, 500);
}
}
async removeAlertRule(c: Context) {
try {
const ruleId = c.req.param('ruleId');
if (!ruleId) {
return c.json({ error: 'Rule ID is required' }, 400);
}
this.metricsCollector.removeAlertRule(ruleId);
return c.json({
message: 'Alert rule removed successfully',
ruleId,
timestamp: new Date().toISOString()
});
} catch (error) {
this.logger.error('Failed to remove alert rule:', error);
return c.json({ error: 'Failed to remove alert rule' }, 500);
}
}
async recordCustomMetric(c: Context) {
try {
const { name, value, labels, type = 'gauge' } = await c.req.json();
if (!name || value === undefined) {
return c.json({
error: 'Missing required fields: name, value'
}, 400);
}
if (typeof value !== 'number') {
return c.json({
error: 'Value must be a number'
}, 400);
}
switch (type) {
case 'gauge':
this.metricsCollector.setGauge(name, value, labels);
break;
case 'counter':
this.metricsCollector.incrementCounter(name, value, labels);
break;
case 'histogram':
this.metricsCollector.recordHistogram(name, value, labels);
break;
default:
return c.json({
error: 'Invalid metric type. Must be one of: gauge, counter, histogram'
}, 400);
}
return c.json({
message: 'Custom metric recorded successfully',
name,
value,
type,
timestamp: new Date().toISOString()
});
} catch (error) {
this.logger.error('Failed to record custom metric:', error);
return c.json({ error: 'Failed to record custom metric' }, 500);
}
}
async getSystemMetrics(c: Context) {
try {
const process = require('process');
const uptime = process.uptime();
const memoryUsage = process.memoryUsage();
const cpuUsage = process.cpuUsage();
const systemMetrics = {
uptime: Math.floor(uptime),
memory: {
rss: Math.round(memoryUsage.rss / 1024 / 1024), // MB
heapUsed: Math.round(memoryUsage.heapUsed / 1024 / 1024), // MB
heapTotal: Math.round(memoryUsage.heapTotal / 1024 / 1024), // MB
external: Math.round(memoryUsage.external / 1024 / 1024), // MB
},
cpu: {
user: cpuUsage.user,
system: cpuUsage.system,
},
timestamp: new Date().toISOString()
};
return c.json(systemMetrics);
} catch (error) {
this.logger.error('Failed to get system metrics:', error);
return c.json({ error: 'Failed to get system metrics' }, 500);
}
}
/**
 * GET handler: export all collected metrics.
 * `?format=prometheus` returns a text exposition; anything else returns a
 * JSON attachment named with the current timestamp.
 */
async exportMetrics(c: Context) {
  try {
    const format = c.req.query('format') || 'json';
    const payload = this.metricsCollector.exportMetrics(format);
    // Pick response headers up front; the body is text either way.
    const headers =
      format === 'prometheus'
        ? { 'Content-Type': 'text/plain; charset=utf-8' }
        : {
            'Content-Type': 'application/json',
            'Content-Disposition': `attachment; filename="metrics-${new Date().toISOString()}.json"`,
          };
    return c.text(payload, 200, headers);
  } catch (error) {
    this.logger.error('Failed to export metrics:', error);
    return c.json({ error: 'Failed to export metrics' }, 500);
  }
}
}

View file

@ -1,651 +0,0 @@
// Market Data Gateway - Enhanced Implementation
import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { logger } from 'hono/logger';
import { prettyJSON } from 'hono/pretty-json';
import { WebSocketServer } from 'ws';
// Types
import { GatewayConfig } from './types/MarketDataGateway';
// Services
import { DataNormalizer, DataNormalizationResult } from './services/DataNormalizer';
import { MarketDataCache } from './services/AdvancedCache';
import { ConnectionPoolManager } from './services/ConnectionPoolManager';
import { dataProviderConfigs, getEnabledProviders } from './config/DataProviderConfig';
// Simple logger interface
// Minimal leveled-logging contract used throughout this service; mirrors the
// console API shape (message plus printf-style rest args).
interface Logger {
  info: (message: string, ...args: any[]) => void;
  error: (message: string, ...args: any[]) => void;
  warn: (message: string, ...args: any[]) => void;
  debug: (message: string, ...args: any[]) => void;
}
// Console-backed application logger; every line carries the service tag and
// its level so interleaved service logs can be told apart.
const appLogger: Logger = (() => {
  const tag = (level: string) => `[MDG-ENHANCED] [${level}]`;
  return {
    info: (message: string, ...args: any[]) => console.log(`${tag('INFO')} ${message}`, ...args),
    error: (message: string, ...args: any[]) => console.error(`${tag('ERROR')} ${message}`, ...args),
    warn: (message: string, ...args: any[]) => console.warn(`${tag('WARN')} ${message}`, ...args),
    debug: (message: string, ...args: any[]) => console.debug(`${tag('DEBUG')} ${message}`, ...args),
  };
})();
// Initialize services
// Process-wide singletons shared by all routes below.
const dataNormalizer = new DataNormalizer();
const marketDataCache = new MarketDataCache();
// Pooled outbound connections for provider requests; caps apply per process
// (global and per-host).
const connectionPool = new ConnectionPoolManager({
  maxConnections: 100,
  maxConnectionsPerHost: 20,
  connectionTimeout: 10000,
  requestTimeout: 30000,
  retryAttempts: 3,
  retryDelay: 1000,
  keepAlive: true,
  maxIdleTime: 300000, // 5 minutes
});
// Configuration matching the GatewayConfig interface
// Server/cache/monitoring settings come from environment variables with local
// defaults; dataSources are derived from the enabled provider configs.
const config: GatewayConfig = {
  server: {
    port: parseInt(process.env.PORT || '3004'),
    host: process.env.HOST || '0.0.0.0',
    maxConnections: 1000,
    cors: {
      origins: ['http://localhost:3000', 'http://localhost:3001', 'http://localhost:8080'],
      methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
      headers: ['Content-Type', 'Authorization'],
    },
  },
  // Map each enabled provider onto the gateway's data-source shape.
  dataSources: getEnabledProviders().map(provider => ({
    // e.g. "Yahoo Finance" -> "yahoo-finance"
    id: provider.name.toLowerCase().replace(/\s+/g, '-'),
    name: provider.name,
    type: provider.type === 'both' ? 'websocket' : provider.type as any,
    provider: provider.name,
    enabled: provider.enabled,
    priority: provider.priority,
    rateLimit: {
      requestsPerSecond: provider.rateLimits.requestsPerSecond,
      // NOTE(review): burstLimit is populated from requestsPerMinute — looks
      // deliberate as an upper bound, but confirm against the interface.
      burstLimit: provider.rateLimits.requestsPerMinute,
    },
    connection: {
      // Prefer the REST quotes endpoint; fall back to the websocket URL.
      url: provider.endpoints.quotes || provider.endpoints.websocket || '',
      authentication: provider.authentication ? {
        type: provider.authentication.type === 'api_key' ? 'apikey' as const : 'basic' as const,
        credentials: {
          apiKey: provider.authentication.key || '',
          secret: provider.authentication.secret || '',
          token: provider.authentication.token || '',
        },
      } : undefined,
    },
    subscriptions: {
      quotes: true,
      trades: true,
      // Order book data requires a websocket endpoint.
      orderbook: provider.endpoints.websocket ? true : false,
      candles: true,
      news: false,
    },
    symbols: ['AAPL', 'GOOGL', 'MSFT', 'AMZN', 'TSLA'], // Default symbols
    retryPolicy: {
      maxRetries: provider.retryPolicy.maxRetries,
      backoffMultiplier: provider.retryPolicy.backoffMultiplier,
      maxBackoffMs: provider.retryPolicy.initialDelayMs * 10,
    },
    healthCheck: {
      intervalMs: 30000,
      timeoutMs: provider.timeout,
      expectedLatencyMs: 1000,
    },
  })),
  processing: {
    pipelines: [],
    bufferSize: 10000,
    batchSize: 100,
    flushIntervalMs: 1000,
  },
  cache: {
    redis: {
      host: process.env.REDIS_HOST || 'localhost',
      port: parseInt(process.env.REDIS_PORT || '6379'),
      password: process.env.REDIS_PASSWORD,
      db: parseInt(process.env.REDIS_DB || '0'),
    },
    ttl: {
      quotes: 60000, // 1 minute
      trades: 300000, // 5 minutes
      candles: 86400000, // 24 hours
      orderbook: 30000, // 30 seconds
    },
  },
  monitoring: {
    metrics: {
      enabled: true,
      port: parseInt(process.env.METRICS_PORT || '9090'),
      intervalMs: 5000,
      retention: '24h',
    },
    alerts: {
      enabled: true,
      thresholds: {
        latency: 1000,
        latencyMs: 1000,
        errorRate: 0.05,
        connectionLoss: 3,
      },
    },
  },
};
// Global state variables
// webSocketServer is assigned by setupWebSocketServer(); isShuttingDown makes
// gracefulShutdown() re-entrant safe (SIGTERM and SIGINT may both fire).
let webSocketServer: WebSocketServer | null = null;
let isShuttingDown = false;
// Create Hono application
const app = new Hono();
// Middleware setup: request logging, pretty JSON responses, CORS from config.
app.use('*', logger());
app.use('*', prettyJSON());
app.use('*', cors({
  origin: config.server.cors.origins,
  allowMethods: config.server.cors.methods,
  allowHeaders: config.server.cors.headers,
}));
// Mock data for testing
// mockTickData is used by the WebSocket publisher below as a template tick.
const mockTickData = {
  symbol: 'AAPL',
  price: 150.25,
  volume: 1000,
  timestamp: new Date().toISOString(),
  bid: 150.20,
  ask: 150.30,
};
// NOTE(review): mockCandleData does not appear to be referenced anywhere in
// this file (the candles route generates its data inline) — confirm and drop.
const mockCandleData = {
  symbol: 'AAPL',
  timeframe: '1m',
  timestamp: new Date().toISOString(),
  open: 150.00,
  high: 150.50,
  low: 149.75,
  close: 150.25,
  volume: 5000,
};
// Health endpoints
// Liveness-style check: always healthy while the process serves requests.
app.get('/health', async (c) => {
  return c.json({
    status: 'healthy',
    timestamp: new Date().toISOString(),
    service: 'market-data-gateway',
    version: '1.0.0',
  });
});
// Readiness: reports WebSocket server availability; cache is assumed up.
app.get('/health/readiness', async (c) => {
  return c.json({
    status: 'ready',
    timestamp: new Date().toISOString(),
    checks: {
      webSocket: webSocketServer ? 'connected' : 'disconnected',
      cache: 'available',
    },
  });
});
// Liveness with process uptime.
app.get('/health/liveness', async (c) => {
  return c.json({
    status: 'alive',
    timestamp: new Date().toISOString(),
    uptime: process.uptime(),
  });
});
// Gateway status endpoints
// High-level counts: configured sources and live WebSocket clients.
app.get('/api/v1/gateway/status', async (c) => {
  return c.json({
    status: 'running',
    dataSources: config.dataSources.length,
    activeConnections: webSocketServer ? webSocketServer.clients.size : 0,
    timestamp: new Date().toISOString(),
  });
});
// Sanitized view of the gateway configuration (no credentials exposed).
app.get('/api/v1/gateway/config', async (c) => {
  return c.json({
    server: config.server,
    dataSourcesCount: config.dataSources.length,
    processingConfig: config.processing,
    monitoring: config.monitoring,
  });
});
// Data source management endpoints
// List configured data sources.
app.get('/api/v1/sources', async (c) => {
  return c.json({
    dataSources: config.dataSources,
    total: config.dataSources.length,
  });
});
// Accept a new data source config; validation/persistence is a stub for now.
app.post('/api/v1/sources', async (c) => {
  try {
    const newSource = await c.req.json();
    // In a real implementation, validate and add the data source
    return c.json({
      message: 'Data source configuration received',
      source: newSource,
      status: 'pending_validation',
    });
  } catch (error) {
    return c.json({ error: 'Invalid data source configuration' }, 400);
  }
});
// Subscription management endpoints (REST side is a stub; see WebSocket
// handler below for the live subscribe flow).
app.get('/api/v1/subscriptions', async (c) => {
  return c.json({
    subscriptions: [],
    total: 0,
    message: 'Subscription management not yet implemented',
  });
});
// Echo back a subscription request with a timestamp-derived id.
app.post('/api/v1/subscriptions', async (c) => {
  try {
    const subscription = await c.req.json();
    return c.json({
      subscriptionId: `sub_${Date.now()}`,
      status: 'active',
      subscription,
    });
  } catch (error) {
    return c.json({ error: 'Invalid subscription request' }, 400);
  }
});
// Market data endpoints with enhanced functionality
// Latest tick for a symbol: served from cache when fresh, otherwise built
// from mock data, normalized, cached and returned.
app.get('/api/v1/data/tick/:symbol', async (c) => {
  const symbol = c.req.param('symbol').toUpperCase();
  const source = c.req.query('source') || 'yahoo-finance';
  try {
    // Check cache first
    const cacheKey = marketDataCache.getQuoteKey(symbol);
    const cachedData = marketDataCache.get(cacheKey);
    if (cachedData) {
      appLogger.debug(`Cache hit for ${symbol}`);
      // NOTE(review): the spread's timestamp is overridden with "now", so a
      // cached response does not report when the data was actually cached.
      return c.json({
        ...cachedData,
        cached: true,
        timestamp: new Date().toISOString(),
      });
    }
    // Fetch from provider
    const provider = dataProviderConfigs[source];
    if (!provider || !provider.enabled) {
      return c.json({ error: 'Data source not available' }, 400);
    }
    // Mock data for now (replace with actual API calls)
    const mockData = {
      symbol,
      price: 150.25 + (Math.random() - 0.5) * 10,
      volume: Math.floor(Math.random() * 100000),
      timestamp: new Date().toISOString(),
      bid: 150.20,
      ask: 150.30,
      source,
    };
    // Normalize the data
    const normalizedResult = dataNormalizer.normalizeMarketData(mockData, source);
    if (!normalizedResult.success) {
      return c.json({ error: normalizedResult.error }, 500);
    }
    // Cache the result
    marketDataCache.setQuote(symbol, normalizedResult.data);
    return c.json({
      ...normalizedResult.data,
      cached: false,
      processingTimeMs: normalizedResult.processingTimeMs,
    });
  } catch (error) {
    appLogger.error(`Error fetching tick data for ${symbol}:`, error);
    return c.json({ error: 'Internal server error' }, 500);
  }
});
// Historical candles for a symbol: cache-first, mock-generated on miss,
// normalized before caching and returning.
app.get('/api/v1/data/candles/:symbol', async (c) => {
  const symbol = c.req.param('symbol').toUpperCase();
  const timeframe = c.req.query('timeframe') || '1m';
  // Clamp limit to [1, 1000]; a non-numeric query value previously became
  // NaN, which silently produced an empty candle array.
  const requested = parseInt(c.req.query('limit') || '100', 10);
  const limit = Number.isFinite(requested) ? Math.min(Math.max(requested, 1), 1000) : 100;
  const source = c.req.query('source') || 'yahoo-finance';
  try {
    // Generate cache key
    const cacheKey = `candles:${symbol}:${timeframe}:${limit}`;
    const cachedData = marketDataCache.get(cacheKey);
    if (cachedData) {
      appLogger.debug(`Cache hit for candles ${symbol}:${timeframe}`);
      return c.json({
        candles: cachedData,
        cached: true,
        count: cachedData.length,
      });
    }
    // Mock candle data generation (replace with actual API calls)
    const candles = Array.from({ length: limit }, (_, i) => {
      const timestamp = new Date(Date.now() - i * 60000);
      const basePrice = 150 + (Math.random() - 0.5) * 20;
      const variation = (Math.random() - 0.5) * 2;
      return {
        symbol,
        timeframe,
        timestamp: timestamp.toISOString(),
        open: basePrice + variation,
        high: basePrice + variation + Math.random() * 2,
        low: basePrice + variation - Math.random() * 2,
        close: basePrice + variation + (Math.random() - 0.5),
        volume: Math.floor(Math.random() * 10000),
        source,
      };
    }).reverse(); // Oldest first
    // Normalize OHLCV data
    const normalizedResult = dataNormalizer.normalizeOHLCV(
      { candles: candles.map(c => ({ ...c, timestamp: new Date(c.timestamp) })) },
      source
    );
    if (!normalizedResult.success) {
      return c.json({ error: normalizedResult.error }, 500);
    }
    // Cache the result.
    // NOTE(review): bracket access works around getCandleTTL being private on
    // MarketDataCache; consider exposing a public TTL accessor instead.
    marketDataCache.set(cacheKey, normalizedResult.data, marketDataCache['getCandleTTL'](timeframe));
    return c.json({
      candles: normalizedResult.data,
      cached: false,
      count: normalizedResult.data?.length || 0,
      processingTimeMs: normalizedResult.processingTimeMs,
    });
  } catch (error) {
    appLogger.error(`Error fetching candles for ${symbol}:`, error);
    return c.json({ error: 'Internal server error' }, 500);
  }
});
// Enhanced metrics endpoints
// Aggregated runtime metrics: process stats plus gateway/cache/pool counters.
app.get('/api/v1/metrics', async (c) => {
  const cacheStats = marketDataCache.getStats();
  const connectionStats = connectionPool.getStats();
  return c.json({
    system: {
      uptime: process.uptime(),
      memoryUsage: process.memoryUsage(),
      cpuUsage: process.cpuUsage(),
    },
    gateway: {
      activeConnections: webSocketServer ? webSocketServer.clients.size : 0,
      dataSourcesCount: config.dataSources.filter(ds => ds.enabled).length,
      messagesProcessed: 0, // not yet wired to the processing pipeline
    },
    cache: cacheStats,
    connectionPool: connectionStats,
    timestamp: new Date().toISOString(),
  });
});
// Data quality assessment endpoint
// Runs the normalizer's quality assessment over a recent sample (mocked).
app.get('/api/v1/data/quality/:symbol', async (c) => {
  const symbol = c.req.param('symbol').toUpperCase();
  const source = c.req.query('source') || 'yahoo-finance';
  try {
    // Get recent data for quality assessment (mock for now)
    const recentData = Array.from({ length: 10 }, (_, i) => ({
      symbol,
      price: 150 + (Math.random() - 0.5) * 10,
      bid: 149.5,
      ask: 150.5,
      volume: Math.floor(Math.random() * 10000),
      timestamp: new Date(Date.now() - i * 60000),
    }));
    const qualityMetrics = dataNormalizer.assessDataQuality(recentData, source);
    return c.json({
      symbol,
      source,
      dataPoints: recentData.length,
      qualityMetrics,
      timestamp: new Date().toISOString(),
    });
  } catch (error) {
    appLogger.error(`Error assessing data quality for ${symbol}:`, error);
    return c.json({ error: 'Internal server error' }, 500);
  }
});
// Cache management endpoints
// Cache statistics plus a bounded sample of keys.
app.get('/api/v1/cache/stats', async (c) => {
  return c.json({
    stats: marketDataCache.getStats(),
    keys: marketDataCache.keys().slice(0, 100), // Limit to first 100 keys
    timestamp: new Date().toISOString(),
  });
});
// Drop every cached entry.
app.delete('/api/v1/cache/clear', async (c) => {
  marketDataCache.clear();
  return c.json({
    message: 'Cache cleared successfully',
    timestamp: new Date().toISOString(),
  });
});
// Drop a single cache entry by exact key.
app.delete('/api/v1/cache/key/:key', async (c) => {
  const key = c.req.param('key');
  const deleted = marketDataCache.delete(key);
  return c.json({
    message: deleted ? 'Key deleted successfully' : 'Key not found',
    key,
    deleted,
    timestamp: new Date().toISOString(),
  });
});
// Data providers status endpoint
// Summarize each configured provider without exposing credentials.
app.get('/api/v1/providers', async (c) => {
  const providers = Object.values(dataProviderConfigs).map(provider => ({
    name: provider.name,
    enabled: provider.enabled,
    type: provider.type,
    priority: provider.priority,
    rateLimits: provider.rateLimits,
    endpoints: Object.keys(provider.endpoints),
  }));
  return c.json({
    providers,
    enabled: providers.filter(p => p.enabled).length,
    total: providers.length,
    timestamp: new Date().toISOString(),
  });
});
// WebSocket server setup: streams mock tick data to subscribed clients on
// HTTP port + 1.
function setupWebSocketServer(): void {
  const wsPort = config.server.port + 1; // Use port + 1 for WebSocket
  webSocketServer = new WebSocketServer({
    port: wsPort,
    perMessageDeflate: false,
  });
  webSocketServer.on('connection', (ws, request) => {
    appLogger.info(`New WebSocket connection from ${request.socket.remoteAddress}`);
    // Track every publisher interval started for this connection so they can
    // be stopped promptly on close. Previously each interval kept firing
    // until its next 5s tick noticed the closed socket, and repeated
    // 'subscribe' messages leaked additional intervals.
    const publishTimers = new Set<NodeJS.Timeout>();
    ws.on('message', async (message) => {
      try {
        const data = JSON.parse(message.toString());
        switch (data.type) {
          case 'subscribe':
            if (data.symbols && Array.isArray(data.symbols)) {
              const subscriptionId = `sub_${Date.now()}`;
              ws.send(JSON.stringify({
                type: 'subscription_confirmed',
                subscriptionId,
                symbols: data.symbols,
                timestamp: new Date().toISOString(),
              }));
              // Send mock data every 5 seconds
              const interval = setInterval(() => {
                if (ws.readyState === ws.OPEN) {
                  data.symbols.forEach((symbol: string) => {
                    ws.send(JSON.stringify({
                      type: 'tick',
                      data: {
                        ...mockTickData,
                        symbol: symbol.toUpperCase(),
                        price: mockTickData.price + (Math.random() - 0.5) * 2,
                        timestamp: new Date().toISOString(),
                      },
                    }));
                  });
                } else {
                  clearInterval(interval);
                  publishTimers.delete(interval);
                }
              }, 5000);
              publishTimers.add(interval);
            }
            break;
          case 'unsubscribe':
            if (data.subscriptionId) {
              ws.send(JSON.stringify({
                type: 'unsubscription_confirmed',
                subscriptionId: data.subscriptionId,
                timestamp: new Date().toISOString(),
              }));
            }
            break;
          default:
            ws.send(JSON.stringify({
              type: 'error',
              message: 'Unknown message type',
            }));
        }
      } catch (error) {
        // Malformed JSON must not kill the socket — answer with an error frame.
        ws.send(JSON.stringify({
          type: 'error',
          message: 'Invalid message format',
        }));
      }
    });
    ws.on('close', () => {
      // Stop all publishers for this connection immediately.
      for (const timer of publishTimers) {
        clearInterval(timer);
      }
      publishTimers.clear();
      appLogger.info('WebSocket connection closed');
    });
    ws.on('error', (error) => {
      appLogger.error('WebSocket error:', error);
    });
  });
  appLogger.info(`WebSocket server listening on port ${wsPort}`);
}
// Enhanced graceful shutdown handler
// Tears down the WebSocket server, the outbound connection pool and the
// cache, then exits the process (0 on success, 1 on failure).
async function gracefulShutdown(): Promise<void> {
  if (isShuttingDown) return; // re-entrant guard: SIGTERM and SIGINT may both fire
  isShuttingDown = true;
  appLogger.info('Initiating graceful shutdown...');
  try {
    // Close WebSocket server
    if (webSocketServer) {
      // terminate() drops clients immediately rather than waiting for a
      // close handshake.
      webSocketServer.clients.forEach((client) => {
        client.terminate();
      });
      webSocketServer.close();
      appLogger.info('WebSocket server closed');
    }
    // Close connection pool
    await connectionPool.close();
    appLogger.info('Connection pool closed');
    // Clean up cache
    marketDataCache.destroy();
    appLogger.info('Cache destroyed');
    appLogger.info('Graceful shutdown completed');
    process.exit(0);
  } catch (error) {
    appLogger.error('Error during shutdown:', error);
    process.exit(1);
  }
}
// Enhanced start server function
// Wires cache/pool event listeners, starts the WebSocket server, registers
// shutdown handlers and logs the effective configuration. The HTTP side is
// served via the default export at the bottom of this file.
async function startServer(): Promise<void> {
  try {
    appLogger.info('Starting Enhanced Market Data Gateway...');
    // Initialize cache event listeners
    marketDataCache.on('hit', (key) => appLogger.debug(`Cache hit: ${key}`));
    marketDataCache.on('miss', (key) => appLogger.debug(`Cache miss: ${key}`));
    marketDataCache.on('evict', (key) => appLogger.debug(`Cache evict: ${key}`));
    // Initialize connection pool event listeners
    connectionPool.on('connectionCreated', (host) => appLogger.debug(`Connection created for: ${host}`));
    connectionPool.on('error', ({ host, error }) => appLogger.warn(`Connection error for ${host}: ${error}`));
    // Setup WebSocket server
    setupWebSocketServer();
    // Setup graceful shutdown handlers
    process.on('SIGTERM', gracefulShutdown);
    process.on('SIGINT', gracefulShutdown);
    // Log service status
    appLogger.info(`HTTP server starting on ${config.server.host}:${config.server.port}`);
    appLogger.info(`WebSocket server running on port ${config.server.port + 1}`);
    appLogger.info(`Data sources configured: ${config.dataSources.length}`);
    appLogger.info(`Enabled providers: ${config.dataSources.filter(ds => ds.enabled).length}`);
    // NOTE(review): bracket access reaches into private config of the cache
    // and pool purely for logging — consider public getters instead.
    appLogger.info(`Cache max size: ${marketDataCache['config'].maxSize}`);
    appLogger.info(`Connection pool max connections: ${connectionPool['config'].maxConnections}`);
    appLogger.info('Enhanced Market Data Gateway started successfully');
  } catch (error) {
    appLogger.error('Failed to start server:', error);
    process.exit(1);
  }
}
// Start the application
// NOTE(review): `require.main === module` is a CommonJS idiom while this file
// uses ESM imports — confirm it compiles to CJS, otherwise this guard never runs.
if (require.main === module) {
  startServer();
}
// Bun/Hono-style default export: the runtime serves `fetch` on `port`.
export default {
  port: config.server.port,
  fetch: app.fetch,
};

View file

@ -1,386 +0,0 @@
// Market Data Gateway - Unified Implementation
import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { logger } from 'hono/logger';
import { prettyJSON } from 'hono/pretty-json';
import { WebSocketServer } from 'ws';
// Types
import { GatewayConfig } from './types/MarketDataGateway';
// Simple logger interface
// Minimal leveled-logging contract mirroring the console API shape.
interface Logger {
  info: (message: string, ...args: any[]) => void;
  error: (message: string, ...args: any[]) => void;
  warn: (message: string, ...args: any[]) => void;
  debug: (message: string, ...args: any[]) => void;
}
// Create application logger
// Console-backed; every line is prefixed with the service tag and level.
const appLogger: Logger = {
  info: (message: string, ...args: any[]) => console.log(`[MDG-UNIFIED] [INFO] ${message}`, ...args),
  error: (message: string, ...args: any[]) => console.error(`[MDG-UNIFIED] [ERROR] ${message}`, ...args),
  warn: (message: string, ...args: any[]) => console.warn(`[MDG-UNIFIED] [WARN] ${message}`, ...args),
  debug: (message: string, ...args: any[]) => console.debug(`[MDG-UNIFIED] [DEBUG] ${message}`, ...args),
};
// Configuration matching the GatewayConfig interface
// Environment-driven with local defaults; dataSources start empty here (this
// unified variant does not derive them from provider configs).
const config: GatewayConfig = {
  server: {
    port: parseInt(process.env.PORT || '3004'),
    host: process.env.HOST || '0.0.0.0',
    maxConnections: 1000,
    cors: {
      origins: ['http://localhost:3000', 'http://localhost:3001', 'http://localhost:8080'],
      methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
      headers: ['Content-Type', 'Authorization'],
    },
  },
  dataSources: [], // Array of DataSourceConfig, initially empty
  processing: {
    pipelines: [],
    bufferSize: 10000,
    batchSize: 100,
    flushIntervalMs: 1000,
  },
  cache: {
    redis: {
      host: process.env.REDIS_HOST || 'localhost',
      port: parseInt(process.env.REDIS_PORT || '6379'),
      password: process.env.REDIS_PASSWORD,
      db: parseInt(process.env.REDIS_DB || '0'),
    },
    ttl: {
      quotes: 60000, // 1 minute
      trades: 300000, // 5 minutes
      candles: 86400000, // 24 hours
      orderbook: 30000, // 30 seconds
    },
  },
  monitoring: {
    metrics: {
      enabled: true,
      port: parseInt(process.env.METRICS_PORT || '9090'),
      intervalMs: 5000,
      retention: '24h',
    },
    alerts: {
      enabled: true,
      thresholds: {
        latency: 1000,
        latencyMs: 1000,
        errorRate: 0.05,
        connectionLoss: 3,
      },
    },
  },
};
// Global state variables
// webSocketServer is assigned by setupWebSocketServer(); isShuttingDown makes
// gracefulShutdown() safe against duplicate signals.
let webSocketServer: WebSocketServer | null = null;
let isShuttingDown = false;
// Create Hono application
const app = new Hono();
// Middleware setup: request logging, pretty JSON, CORS from config.
app.use('*', logger());
app.use('*', prettyJSON());
app.use('*', cors({
  origin: config.server.cors.origins,
  allowMethods: config.server.cors.methods,
  allowHeaders: config.server.cors.headers,
}));
// Mock data for testing
// Template tick reused by the REST tick route and the WebSocket publisher.
const mockTickData = {
  symbol: 'AAPL',
  price: 150.25,
  volume: 1000,
  timestamp: new Date().toISOString(),
  bid: 150.20,
  ask: 150.30,
};
// Template candle reused by the candles route.
const mockCandleData = {
  symbol: 'AAPL',
  timeframe: '1m',
  timestamp: new Date().toISOString(),
  open: 150.00,
  high: 150.50,
  low: 149.75,
  close: 150.25,
  volume: 5000,
};
// Health endpoints
// Liveness-style check: always healthy while the process serves requests.
app.get('/health', async (c) => {
  return c.json({
    status: 'healthy',
    timestamp: new Date().toISOString(),
    service: 'market-data-gateway',
    version: '1.0.0',
  });
});
// Readiness: reports WebSocket server availability; cache is assumed up.
app.get('/health/readiness', async (c) => {
  return c.json({
    status: 'ready',
    timestamp: new Date().toISOString(),
    checks: {
      webSocket: webSocketServer ? 'connected' : 'disconnected',
      cache: 'available',
    },
  });
});
// Liveness with process uptime.
app.get('/health/liveness', async (c) => {
  return c.json({
    status: 'alive',
    timestamp: new Date().toISOString(),
    uptime: process.uptime(),
  });
});
// Gateway status endpoints
// High-level counts: configured sources and live WebSocket clients.
app.get('/api/v1/gateway/status', async (c) => {
  return c.json({
    status: 'running',
    dataSources: config.dataSources.length,
    activeConnections: webSocketServer ? webSocketServer.clients.size : 0,
    timestamp: new Date().toISOString(),
  });
});
// Sanitized view of the gateway configuration.
app.get('/api/v1/gateway/config', async (c) => {
  return c.json({
    server: config.server,
    dataSourcesCount: config.dataSources.length,
    processingConfig: config.processing,
    monitoring: config.monitoring,
  });
});
// Data source management endpoints
// List configured data sources (empty in this unified variant).
app.get('/api/v1/sources', async (c) => {
  return c.json({
    dataSources: config.dataSources,
    total: config.dataSources.length,
  });
});
// Accept a new data source config; validation/persistence is a stub for now.
app.post('/api/v1/sources', async (c) => {
  try {
    const newSource = await c.req.json();
    // In a real implementation, validate and add the data source
    return c.json({
      message: 'Data source configuration received',
      source: newSource,
      status: 'pending_validation',
    });
  } catch (error) {
    return c.json({ error: 'Invalid data source configuration' }, 400);
  }
});
// Subscription management endpoints (REST side is a stub; the live subscribe
// flow happens over WebSocket below).
app.get('/api/v1/subscriptions', async (c) => {
  return c.json({
    subscriptions: [],
    total: 0,
    message: 'Subscription management not yet implemented',
  });
});
// Echo back a subscription request with a timestamp-derived id.
app.post('/api/v1/subscriptions', async (c) => {
  try {
    const subscription = await c.req.json();
    return c.json({
      subscriptionId: `sub_${Date.now()}`,
      status: 'active',
      subscription,
    });
  } catch (error) {
    return c.json({ error: 'Invalid subscription request' }, 400);
  }
});
// Market data endpoints (mock-backed).
// Latest tick for a symbol.
app.get('/api/v1/data/tick/:symbol', async (c) => {
  const symbol = c.req.param('symbol');
  return c.json({
    ...mockTickData,
    symbol: symbol.toUpperCase(),
  });
});
// Historical candles for a symbol, newest first.
app.get('/api/v1/data/candles/:symbol', async (c) => {
  const symbol = c.req.param('symbol');
  const timeframe = c.req.query('timeframe') || '1m';
  // Clamp limit to [1, 1000]: a non-numeric value previously became NaN
  // (silently yielding an empty array), and an arbitrarily large value
  // allocated an unbounded candle array per request. The 1000 cap matches
  // the enhanced gateway's candles endpoint.
  const requested = parseInt(c.req.query('limit') || '100', 10);
  const limit = Number.isFinite(requested) ? Math.min(Math.max(requested, 1), 1000) : 100;
  const candles = Array.from({ length: limit }, (_, i) => ({
    ...mockCandleData,
    symbol: symbol.toUpperCase(),
    timeframe,
    timestamp: new Date(Date.now() - i * 60000).toISOString(),
  }));
  return c.json({ candles });
});
// Metrics endpoints
// Process-level stats plus gateway counters (messagesProcessed not wired yet).
app.get('/api/v1/metrics', async (c) => {
  return c.json({
    system: {
      uptime: process.uptime(),
      memoryUsage: process.memoryUsage(),
      cpuUsage: process.cpuUsage(),
    },
    gateway: {
      activeConnections: webSocketServer ? webSocketServer.clients.size : 0,
      dataSourcesCount: config.dataSources.length,
      messagesProcessed: 0,
    },
    timestamp: new Date().toISOString(),
  });
});
// WebSocket server setup
// Streams mock tick data to subscribed clients on HTTP port + 1. Protocol:
// client sends {type:'subscribe', symbols:[...]} and receives a confirmation
// plus a tick frame per symbol every 5 seconds; 'unsubscribe' is acknowledged
// but does not stop the publisher.
function setupWebSocketServer(): void {
  const wsPort = config.server.port + 1; // Use port + 1 for WebSocket
  webSocketServer = new WebSocketServer({
    port: wsPort,
    perMessageDeflate: false,
  });
  webSocketServer.on('connection', (ws, request) => {
    appLogger.info(`New WebSocket connection from ${request.socket.remoteAddress}`);
    ws.on('message', async (message) => {
      try {
        const data = JSON.parse(message.toString());
        switch (data.type) {
          case 'subscribe':
            if (data.symbols && Array.isArray(data.symbols)) {
              const subscriptionId = `sub_${Date.now()}`;
              ws.send(JSON.stringify({
                type: 'subscription_confirmed',
                subscriptionId,
                symbols: data.symbols,
                timestamp: new Date().toISOString(),
              }));
              // Send mock data every 5 seconds
              // NOTE(review): each 'subscribe' starts a new interval that is
              // only cleared on a tick after the socket closes; repeated
              // subscribes stack publishers. Consider tracking intervals per
              // connection and clearing them in the 'close' handler.
              const interval = setInterval(() => {
                if (ws.readyState === ws.OPEN) {
                  data.symbols.forEach((symbol: string) => {
                    ws.send(JSON.stringify({
                      type: 'tick',
                      data: {
                        ...mockTickData,
                        symbol: symbol.toUpperCase(),
                        price: mockTickData.price + (Math.random() - 0.5) * 2,
                        timestamp: new Date().toISOString(),
                      },
                    }));
                  });
                } else {
                  clearInterval(interval);
                }
              }, 5000);
            }
            break;
          case 'unsubscribe':
            if (data.subscriptionId) {
              ws.send(JSON.stringify({
                type: 'unsubscription_confirmed',
                subscriptionId: data.subscriptionId,
                timestamp: new Date().toISOString(),
              }));
            }
            break;
          default:
            ws.send(JSON.stringify({
              type: 'error',
              message: 'Unknown message type',
            }));
        }
      } catch (error) {
        // Malformed JSON must not kill the socket — answer with an error frame.
        ws.send(JSON.stringify({
          type: 'error',
          message: 'Invalid message format',
        }));
      }
    });
    ws.on('close', () => {
      appLogger.info('WebSocket connection closed');
    });
    ws.on('error', (error) => {
      appLogger.error('WebSocket error:', error);
    });
  });
  appLogger.info(`WebSocket server listening on port ${wsPort}`);
}
// Graceful shutdown handler
// Terminates WebSocket clients, closes the server and exits the process
// (0 on success, 1 on failure).
async function gracefulShutdown(): Promise<void> {
  if (isShuttingDown) return; // re-entrant guard: SIGTERM and SIGINT may both fire
  isShuttingDown = true;
  appLogger.info('Initiating graceful shutdown...');
  try {
    // Close WebSocket server
    if (webSocketServer) {
      // terminate() drops clients immediately, skipping the close handshake.
      webSocketServer.clients.forEach((client) => {
        client.terminate();
      });
      webSocketServer.close();
      appLogger.info('WebSocket server closed');
    }
    appLogger.info('Graceful shutdown completed');
    process.exit(0);
  } catch (error) {
    appLogger.error('Error during shutdown:', error);
    process.exit(1);
  }
}
// Start server function
// Starts the WebSocket server and registers shutdown handlers; the HTTP side
// is served via the default export at the bottom of this file.
async function startServer(): Promise<void> {
  try {
    appLogger.info('Starting Market Data Gateway...');
    // Setup WebSocket server
    setupWebSocketServer();
    // Setup graceful shutdown handlers
    process.on('SIGTERM', gracefulShutdown);
    process.on('SIGINT', gracefulShutdown);
    appLogger.info(`HTTP server starting on ${config.server.host}:${config.server.port}`);
    appLogger.info(`WebSocket server running on port ${config.server.port + 1}`);
    appLogger.info('Market Data Gateway started successfully');
  } catch (error) {
    appLogger.error('Failed to start server:', error);
    process.exit(1);
  }
}
// Start the application
// NOTE(review): `require.main === module` is a CommonJS idiom while this file
// uses ESM imports — confirm it compiles to CJS, otherwise this guard never runs.
if (require.main === module) {
  startServer();
}
// Bun/Hono-style default export: the runtime serves `fetch` on `port`.
export default {
  port: config.server.port,
  fetch: app.fetch,
};

View file

@ -1,7 +0,0 @@
// Real-time market data processing components
// Barrel module: re-exports the market-data service classes so consumers can
// import them from a single path.
export { SubscriptionManager } from '../services/SubscriptionManager';
export { DataSourceManager } from '../services/DataSourceManager';
export { EventPublisher } from '../services/EventPublisher';
export { ProcessingEngine } from '../services/ProcessingEngine';
export { MarketDataService } from '../services/MarketDataService';
export { MarketDataGatewayService } from '../services/MarketDataGatewayService';

View file

@ -1,361 +0,0 @@
import { EventEmitter } from 'events';
/** A single stored value plus its bookkeeping metadata. */
export interface CacheEntry<T> {
  data: T;
  timestamp: number;    // epoch ms when the entry was stored
  ttl: number;          // lifetime in ms counted from `timestamp`
  hits: number;         // successful reads of this entry
  lastAccessed: number; // epoch ms of last read (or store)
}
/** Aggregate cache statistics as returned by getStats(). */
export interface CacheStats {
  totalEntries: number;
  memoryUsage: number;       // rough estimate in bytes
  hitRate: number;           // hits / (hits + misses); 0 when no traffic yet
  totalHits: number;
  totalMisses: number;
  averageAccessTime: number; // ms; only meaningful when enableStats is true
}
/** Construction-time tuning knobs. */
export interface CacheConfig {
  maxSize: number;             // max entry count before LRU eviction kicks in
  defaultTtl: number;          // ms, used when set() receives no explicit ttl
  cleanupInterval: number;     // ms between background expiry sweeps
  enableStats: boolean;        // track access-time statistics
  compressionEnabled: boolean; // reserved; not implemented in this class
}
/**
 * In-memory TTL cache with LRU eviction, hit/miss statistics and
 * EventEmitter notifications: 'hit', 'miss', 'set', 'delete', 'expired',
 * 'evict', 'cleanup', 'clear', 'error'.
 */
export class AdvancedCache<T = any> extends EventEmitter {
  private cache = new Map<string, CacheEntry<T>>();
  private stats = {
    hits: 0,
    misses: 0,
    totalAccessTime: 0,
    accessCount: 0,
  };
  private cleanupTimer: NodeJS.Timeout | null = null;
  constructor(private config: CacheConfig) {
    super();
    this.startCleanupTimer();
  }
  /**
   * Get a value, or null when absent or expired. Expired entries are removed
   * lazily here and counted as misses.
   */
  get(key: string): T | null {
    const startTime = Date.now();
    const entry = this.cache.get(key);
    if (!entry) {
      this.stats.misses++;
      this.emit('miss', key);
      return null;
    }
    // Lazy expiry: drop the entry if its TTL has elapsed.
    if (Date.now() > entry.timestamp + entry.ttl) {
      this.cache.delete(key);
      this.stats.misses++;
      this.emit('expired', key, entry);
      return null;
    }
    // Update per-entry and global access statistics.
    entry.hits++;
    entry.lastAccessed = Date.now();
    this.stats.hits++;
    if (this.config.enableStats) {
      this.stats.totalAccessTime += Date.now() - startTime;
      this.stats.accessCount++;
    }
    this.emit('hit', key, entry);
    return entry.data;
  }
  /**
   * Store a value. Evicts the least-recently-used entry first when the cache
   * is full and `key` is not already present.
   */
  set(key: string, value: T, ttl?: number): void {
    const effectiveTtl = ttl || this.config.defaultTtl;
    if (this.cache.size >= this.config.maxSize && !this.cache.has(key)) {
      this.evictLeastUsed();
    }
    const entry: CacheEntry<T> = {
      data: value,
      timestamp: Date.now(),
      ttl: effectiveTtl,
      hits: 0,
      lastAccessed: Date.now(),
    };
    this.cache.set(key, entry);
    this.emit('set', key, entry);
  }
  /** Delete a key; returns true when it was present. */
  delete(key: string): boolean {
    const deleted = this.cache.delete(key);
    if (deleted) {
      this.emit('delete', key);
    }
    return deleted;
  }
  /** True when the key exists and has not expired (expired keys are pruned). */
  has(key: string): boolean {
    const entry = this.cache.get(key);
    if (!entry) return false;
    if (Date.now() > entry.timestamp + entry.ttl) {
      this.cache.delete(key);
      return false;
    }
    return true;
  }
  /** Drop every entry and reset statistics. */
  clear(): void {
    this.cache.clear();
    this.resetStats();
    this.emit('clear');
  }
  /** Snapshot of the current cache statistics. */
  getStats(): CacheStats {
    const memoryUsage = this.estimateMemoryUsage();
    const hitRate = this.stats.hits + this.stats.misses > 0
      ? this.stats.hits / (this.stats.hits + this.stats.misses)
      : 0;
    const averageAccessTime = this.stats.accessCount > 0
      ? this.stats.totalAccessTime / this.stats.accessCount
      : 0;
    return {
      totalEntries: this.cache.size,
      memoryUsage,
      hitRate,
      totalHits: this.stats.hits,
      totalMisses: this.stats.misses,
      averageAccessTime,
    };
  }
  /** All keys, including entries that may have expired but not been swept. */
  keys(): string[] {
    return Array.from(this.cache.keys());
  }
  /** Current entry count. */
  size(): number {
    return this.cache.size;
  }
  /**
   * Return the cached value for `key`, or run `loader`, cache its result and
   * return it. Checks presence via has() rather than a null comparison so a
   * legitimately cached null/undefined value no longer re-triggers the
   * loader on every call.
   */
  async getOrSet<K>(
    key: string,
    loader: () => Promise<K>,
    ttl?: number
  ): Promise<K> {
    if (this.has(key)) {
      return this.get(key) as unknown as K;
    }
    try {
      const value = await loader();
      this.set(key, value as any, ttl);
      return value;
    } catch (error) {
      this.emit('error', key, error);
      throw error;
    }
  }
  /** Batch get: one entry per requested key (null for misses). */
  mget(keys: string[]): Map<string, T | null> {
    const result = new Map<string, T | null>();
    for (const key of keys) {
      result.set(key, this.get(key));
    }
    return result;
  }
  /** Batch set with a shared optional TTL. */
  mset(entries: Map<string, T>, ttl?: number): void {
    for (const [key, value] of entries) {
      this.set(key, value, ttl);
    }
  }
  /** Remove every expired entry; returns how many were dropped. */
  cleanup(): number {
    const now = Date.now();
    let removedCount = 0;
    for (const [key, entry] of this.cache.entries()) {
      if (now > entry.timestamp + entry.ttl) {
        this.cache.delete(key);
        removedCount++;
        this.emit('expired', key, entry);
      }
    }
    return removedCount;
  }
  /**
   * Evict the least-recently-used entry. `oldestTime` starts at Infinity so
   * an entry is always selected; the previous Date.now() seed skipped
   * eviction whenever every entry had been touched within the current
   * millisecond, letting the cache exceed maxSize. The explicit null check
   * also handles an empty-string key, which is falsy.
   */
  private evictLeastUsed(): void {
    let oldestKey: string | null = null;
    let oldestTime = Infinity;
    for (const [key, entry] of this.cache.entries()) {
      if (entry.lastAccessed < oldestTime) {
        oldestTime = entry.lastAccessed;
        oldestKey = key;
      }
    }
    if (oldestKey !== null) {
      this.cache.delete(oldestKey);
      this.emit('evict', oldestKey);
    }
  }
  /**
   * Rough memory estimate in bytes: UTF-16 key length plus the JSON size of
   * each payload plus fixed per-entry overhead. Non-serializable payloads
   * (circular references, BigInt) are counted as overhead only instead of
   * throwing.
   */
  private estimateMemoryUsage(): number {
    let totalSize = 0;
    for (const [key, entry] of this.cache.entries()) {
      totalSize += key.length * 2; // UTF-16 code units
      try {
        totalSize += JSON.stringify(entry.data).length * 2;
      } catch {
        totalSize += 64; // unserializable payload: flat fallback
      }
      totalSize += 64; // entry metadata overhead
    }
    return totalSize;
  }
  /** Reset the global hit/miss/timing counters. */
  private resetStats(): void {
    this.stats = {
      hits: 0,
      misses: 0,
      totalAccessTime: 0,
      accessCount: 0,
    };
  }
  /** (Re)start the periodic expiry sweep. */
  private startCleanupTimer(): void {
    if (this.cleanupTimer) {
      clearInterval(this.cleanupTimer);
    }
    this.cleanupTimer = setInterval(() => {
      const removed = this.cleanup();
      if (removed > 0) {
        this.emit('cleanup', removed);
      }
    }, this.config.cleanupInterval);
    // Do not let the background sweep keep the Node.js process alive.
    this.cleanupTimer.unref?.();
  }
  /** Stop the sweep timer, drop all entries and detach all listeners. */
  destroy(): void {
    if (this.cleanupTimer) {
      clearInterval(this.cleanupTimer);
      this.cleanupTimer = null;
    }
    this.clear();
    this.removeAllListeners();
  }
}
/**
 * Cache tuned for market data: fixed sizing, short TTLs per data type,
 * and timeframe-aware expiry for candles.
 */
export class MarketDataCache extends AdvancedCache {
  constructor() {
    super({
      maxSize: 10000,
      defaultTtl: 60000, // 1 minute
      cleanupInterval: 30000, // 30 seconds
      enableStats: true,
      compressionEnabled: false,
    });
  }
  /** Cache key holding the latest quote for a symbol. */
  getQuoteKey(symbol: string): string {
    return `quote:${symbol}`;
  }
  /** Cache key for a single candle of a symbol/timeframe at a timestamp. */
  getCandleKey(symbol: string, timeframe: string, timestamp: Date): string {
    return `candle:${symbol}:${timeframe}:${timestamp.getTime()}`;
  }
  /** Cache key holding the order book snapshot for a symbol. */
  getOrderBookKey(symbol: string): string {
    return `orderbook:${symbol}`;
  }
  /** Store a quote; kept for one minute. */
  setQuote(symbol: string, data: any): void {
    this.set(this.getQuoteKey(symbol), data, 60000); // 1 minute
  }
  /** Store a candle; TTL scales with its timeframe. */
  setCandle(symbol: string, timeframe: string, timestamp: Date, data: any): void {
    const ttl = this.ttlForTimeframe(timeframe);
    this.set(this.getCandleKey(symbol, timeframe, timestamp), data, ttl);
  }
  /** Store an order book snapshot; kept for thirty seconds. */
  setOrderBook(symbol: string, data: any): void {
    this.set(this.getOrderBookKey(symbol), data, 30000); // 30 seconds
  }
  /** TTL in ms for a candle of the given timeframe (default: 5 minutes). */
  private ttlForTimeframe(timeframe: string): number {
    switch (timeframe) {
      case '1m': return 60000; // 1 minute
      case '5m': return 300000; // 5 minutes
      case '15m': return 900000; // 15 minutes
      case '1h': return 3600000; // 1 hour
      case '1d': return 86400000; // 24 hours
      default: return 300000; // 5 minutes
    }
  }
}

View file

@ -1,372 +0,0 @@
import Redis from 'ioredis';
import { EventEmitter } from 'events';
import {
MarketDataTick,
MarketDataCandle,
CacheConfig,
Logger,
HealthStatus,
GatewayMetrics
} from '../types/MarketDataGateway';
// Aggregate counters describing Redis cache activity for this process.
interface CacheMetrics {
  hits: number; // reads that found a value
  misses: number; // reads that found nothing
  sets: number; // successful write operations
  deletes: number; // successful delete operations
  errors: number; // failed Redis commands
  totalRequests: number; // all read attempts (hits + misses)
  hitRate: number; // hits / totalRequests; 0 when no requests yet
}
// Envelope stored in Redis: the payload plus freshness bookkeeping.
interface CacheEntry<T> {
  data: T; // the cached payload
  timestamp: number; // epoch ms when the entry was written
  ttl: number; // expiry passed to Redis SETEX (interpreted as seconds)
}
/**
 * Redis-backed cache for market data (ticks, candles) plus generic
 * key/value storage. Tracks hit/miss metrics and re-emits connection
 * lifecycle events ('connected', 'disconnected', 'error') as well as
 * 'tick-cached' / 'candle-cached' on successful writes.
 */
export class CacheManager extends EventEmitter {
  private redis: Redis;
  private config: CacheConfig;
  private logger: Logger;
  private metrics: CacheMetrics;
  private isInitialized: boolean = false;
  constructor(config: CacheConfig, logger: Logger) {
    super();
    this.config = config;
    this.logger = logger;
    this.redis = new Redis({
      host: config.redis.host,
      port: config.redis.port,
      password: config.redis.password,
      db: config.redis.database || 0,
      // NOTE(review): 'retryDelayOnFailover' is not a documented ioredis
      // option — confirm it is recognized and not silently ignored.
      retryDelayOnFailover: 100,
      maxRetriesPerRequest: 3,
      lazyConnect: true, // connection is opened explicitly in initialize()
    });
    this.metrics = {
      hits: 0,
      misses: 0,
      sets: 0,
      deletes: 0,
      errors: 0,
      totalRequests: 0,
      hitRate: 0,
    };
    this.setupEventHandlers();
  }
  // Wire the Redis connection lifecycle into this emitter and the logger.
  private setupEventHandlers(): void {
    this.redis.on('connect', () => {
      this.logger.info('Cache manager connected to Redis');
      this.isInitialized = true;
      this.emit('connected');
    });
    this.redis.on('error', (error) => {
      this.logger.error('Redis connection error:', error);
      this.metrics.errors++;
      this.emit('error', error);
    });
    this.redis.on('close', () => {
      this.logger.warn('Redis connection closed');
      this.isInitialized = false;
      this.emit('disconnected');
    });
  }
  // Explicitly open the lazy connection; rethrows when Redis is unreachable.
  async initialize(): Promise<void> {
    try {
      await this.redis.connect();
      this.logger.info('Cache manager initialized successfully');
    } catch (error) {
      this.logger.error('Failed to initialize cache manager:', error);
      throw error;
    }
  }
  // Graceful shutdown via QUIT; errors are logged but deliberately not rethrown.
  async shutdown(): Promise<void> {
    try {
      await this.redis.quit();
      this.logger.info('Cache manager shut down successfully');
    } catch (error) {
      this.logger.error('Error shutting down cache manager:', error);
    }
  }
  // Market data caching methods
  /**
   * Store the latest tick for a symbol, and the bare price under a
   * separate quick-access key. TTL is passed to SETEX (seconds),
   * defaulting to config.tickTtl. Rethrows on Redis failure.
   */
  async cacheTick(symbol: string, tick: MarketDataTick, ttl?: number): Promise<void> {
    try {
      const key = this.getTickKey(symbol);
      const cacheEntry: CacheEntry<MarketDataTick> = {
        data: tick,
        timestamp: Date.now(),
        ttl: ttl || this.config.tickTtl,
      };
      await this.redis.setex(key, cacheEntry.ttl, JSON.stringify(cacheEntry));
      this.metrics.sets++;
      // Also cache latest price for quick access
      await this.redis.setex(
        this.getLatestPriceKey(symbol),
        this.config.tickTtl,
        tick.price.toString()
      );
      this.emit('tick-cached', { symbol, tick });
    } catch (error) {
      this.logger.error(`Failed to cache tick for ${symbol}:`, error);
      this.metrics.errors++;
      throw error;
    }
  }
  /** Read the most recent cached tick; null on miss or on Redis error. */
  async getLatestTick(symbol: string): Promise<MarketDataTick | null> {
    try {
      this.metrics.totalRequests++;
      const key = this.getTickKey(symbol);
      const cached = await this.redis.get(key);
      if (cached) {
        this.metrics.hits++;
        const entry: CacheEntry<MarketDataTick> = JSON.parse(cached);
        return entry.data;
      }
      this.metrics.misses++;
      return null;
    } catch (error) {
      this.logger.error(`Failed to get latest tick for ${symbol}:`, error);
      this.metrics.errors++;
      return null;
    } finally {
      this.updateHitRate();
    }
  }
  /**
   * Store a candle under a timestamped key and index it in a sorted set
   * so getCandleRange can query by time. Rethrows on Redis failure.
   */
  async cacheCandle(symbol: string, timeframe: string, candle: MarketDataCandle, ttl?: number): Promise<void> {
    try {
      const key = this.getCandleKey(symbol, timeframe, candle.timestamp);
      const cacheEntry: CacheEntry<MarketDataCandle> = {
        data: candle,
        timestamp: Date.now(),
        ttl: ttl || this.config.candleTtl,
      };
      await this.redis.setex(key, cacheEntry.ttl, JSON.stringify(cacheEntry));
      this.metrics.sets++;
      // Also add to sorted set for range queries
      // NOTE(review): members added here are never removed when the candle
      // keys they reference expire — the set grows unboundedly and range
      // queries will resolve stale members to misses. Consider trimming
      // (e.g. ZREMRANGEBYSCORE) on write or during cleanup.
      await this.redis.zadd(
        this.getCandleSetKey(symbol, timeframe),
        candle.timestamp,
        key
      );
      this.emit('candle-cached', { symbol, timeframe, candle });
    } catch (error) {
      this.logger.error(`Failed to cache candle for ${symbol}:`, error);
      this.metrics.errors++;
      throw error;
    }
  }
  /**
   * Fetch all cached candles for [startTime, endTime] (epoch ms scores),
   * sorted ascending by timestamp. Returns [] on error or when nothing
   * in the range is still cached.
   */
  async getCandleRange(
    symbol: string,
    timeframe: string,
    startTime: number,
    endTime: number
  ): Promise<MarketDataCandle[]> {
    try {
      this.metrics.totalRequests++;
      const setKey = this.getCandleSetKey(symbol, timeframe);
      const candleKeys = await this.redis.zrangebyscore(setKey, startTime, endTime);
      if (candleKeys.length === 0) {
        this.metrics.misses++;
        return [];
      }
      // Fetch all candle payloads in one round trip.
      const pipeline = this.redis.pipeline();
      candleKeys.forEach(key => pipeline.get(key));
      const results = await pipeline.exec();
      const candles: MarketDataCandle[] = [];
      let hasData = false;
      results?.forEach((result) => {
        // Each pipeline result is [error, value]; skip misses/expired keys.
        if (result && result[1]) {
          hasData = true;
          try {
            const entry: CacheEntry<MarketDataCandle> = JSON.parse(result[1] as string);
            candles.push(entry.data);
          } catch (parseError) {
            this.logger.error('Failed to parse cached candle:', parseError);
          }
        }
      });
      // A single surviving candle counts the whole range query as a hit.
      if (hasData) {
        this.metrics.hits++;
      } else {
        this.metrics.misses++;
      }
      return candles.sort((a, b) => a.timestamp - b.timestamp);
    } catch (error) {
      this.logger.error(`Failed to get candle range for ${symbol}:`, error);
      this.metrics.errors++;
      return [];
    } finally {
      this.updateHitRate();
    }
  }
  // Generic caching methods
  /** Store any JSON-serializable value; TTL defaults to config.defaultTtl. */
  async set<T>(key: string, value: T, ttl?: number): Promise<void> {
    try {
      const cacheEntry: CacheEntry<T> = {
        data: value,
        timestamp: Date.now(),
        ttl: ttl || this.config.defaultTtl,
      };
      await this.redis.setex(key, cacheEntry.ttl, JSON.stringify(cacheEntry));
      this.metrics.sets++;
    } catch (error) {
      this.logger.error(`Failed to set cache key ${key}:`, error);
      this.metrics.errors++;
      throw error;
    }
  }
  /** Read a generic value; null on miss or on Redis error. */
  async get<T>(key: string): Promise<T | null> {
    try {
      this.metrics.totalRequests++;
      const cached = await this.redis.get(key);
      if (cached) {
        this.metrics.hits++;
        const entry: CacheEntry<T> = JSON.parse(cached);
        return entry.data;
      }
      this.metrics.misses++;
      return null;
    } catch (error) {
      this.logger.error(`Failed to get cache key ${key}:`, error);
      this.metrics.errors++;
      return null;
    } finally {
      this.updateHitRate();
    }
  }
  /** Delete a single key; rethrows on Redis failure. */
  async delete(key: string): Promise<void> {
    try {
      await this.redis.del(key);
      this.metrics.deletes++;
    } catch (error) {
      this.logger.error(`Failed to delete cache key ${key}:`, error);
      this.metrics.errors++;
      throw error;
    }
  }
  /**
   * Delete every key matching a glob pattern; returns the delete count.
   * NOTE(review): KEYS scans the whole keyspace and blocks Redis — on
   * large datasets SCAN would be safer.
   */
  async deletePattern(pattern: string): Promise<number> {
    try {
      const keys = await this.redis.keys(pattern);
      if (keys.length > 0) {
        const deleted = await this.redis.del(...keys);
        this.metrics.deletes += deleted;
        return deleted;
      }
      return 0;
    } catch (error) {
      this.logger.error(`Failed to delete pattern ${pattern}:`, error);
      this.metrics.errors++;
      throw error;
    }
  }
  // Cache management
  /** Wipe the entire current Redis database; rethrows on failure. */
  async flush(): Promise<void> {
    try {
      await this.redis.flushdb();
      this.logger.info('Cache flushed successfully');
    } catch (error) {
      this.logger.error('Failed to flush cache:', error);
      this.metrics.errors++;
      throw error;
    }
  }
  /** Number of keys in the current database; 0 on error. */
  async getSize(): Promise<number> {
    try {
      return await this.redis.dbsize();
    } catch (error) {
      this.logger.error('Failed to get cache size:', error);
      return 0;
    }
  }
  /**
   * Redis memory usage in bytes; { used: 0, peak: 0 } on error.
   * NOTE(review): MEMORY USAGE requires a key argument, and MEMORY STATS
   * returns a flat array rather than an object keyed by 'peak.allocated' —
   * both calls below likely fail or misparse and fall into the catch.
   * Verify against the ioredis/Redis documentation.
   */
  async getMemoryUsage(): Promise<{ used: number; peak: number }> {
    try {
      const info = await this.redis.memory('usage');
      const stats = await this.redis.memory('stats');
      return {
        used: parseInt(info as string) || 0,
        peak: parseInt(stats['peak.allocated'] as string) || 0,
      };
    } catch (error) {
      this.logger.error('Failed to get memory usage:', error);
      return { used: 0, peak: 0 };
    }
  }
  // Health and metrics
  /** Health snapshot driven by the Redis connection state. */
  getHealth(): HealthStatus {
    return {
      status: this.isInitialized ? 'healthy' : 'unhealthy',
      message: this.isInitialized ? 'Cache manager is operational' : 'Cache manager not connected',
      timestamp: new Date().toISOString(),
      details: {
        connected: this.isInitialized,
        metrics: this.metrics,
      },
    };
  }
  /** Defensive copy of the current counters. */
  getMetrics(): CacheMetrics {
    return { ...this.metrics };
  }
  // Recompute hitRate after every read attempt.
  private updateHitRate(): void {
    this.metrics.hitRate = this.metrics.totalRequests > 0
      ? this.metrics.hits / this.metrics.totalRequests
      : 0;
  }
  // Key generation methods
  private getTickKey(symbol: string): string {
    return `tick:${symbol}:latest`;
  }
  private getLatestPriceKey(symbol: string): string {
    return `price:${symbol}:latest`;
  }
  private getCandleKey(symbol: string, timeframe: string, timestamp: number): string {
    return `candle:${symbol}:${timeframe}:${timestamp}`;
  }
  private getCandleSetKey(symbol: string, timeframe: string): string {
    return `candles:${symbol}:${timeframe}`;
  }
}

View file

@ -1,346 +0,0 @@
import { EventEmitter } from 'eventemitter3';
import {
BunHttpClient,
RequestConfig,
HttpResponse,
ConnectionStats,
HttpClientConfig
} from '@stock-bot/http-client';
// Tunables for the HTTP connection pool.
export interface ConnectionPoolConfig {
  maxConnections: number; // global cap on concurrent in-flight requests
  maxConnectionsPerHost: number; // per-host cap on concurrent in-flight requests
  connectionTimeout: number; // NOTE(review): not referenced anywhere in this file — confirm it is consumed elsewhere
  requestTimeout: number; // per-request timeout (ms), forwarded to BunHttpClient
  retryAttempts: number; // retry count, forwarded to BunHttpClient
  retryDelay: number; // delay between retries (ms), forwarded to BunHttpClient
  keepAlive: boolean; // forwarded to BunHttpClient
  maxIdleTime: number; // NOTE(review): not referenced anywhere in this file — confirm it is consumed elsewhere
}
// A request waiting in the pool's FIFO queue.
export interface QueuedRequest {
  id: string; // unique id produced by generateRequestId()
  config: RequestConfig; // underlying HTTP request configuration
  resolve: (value: any) => void; // settles the caller's promise with response data
  reject: (error: any) => void; // settles the caller's promise with an error
  timestamp: number; // epoch ms when the request was enqueued
  retryCount: number; // retries observed so far (the client performs actual retrying)
}
/**
 * HTTP connection pool built on BunHttpClient: one client per host,
 * per-host and global caps on in-flight requests, and a FIFO queue for
 * overflow that is drained both opportunistically and by a 100ms timer.
 * Emits 'connectionCreated', 'response', 'error', 'retry' and 'closed'.
 */
export class ConnectionPoolManager extends EventEmitter {
  private clients = new Map<string, BunHttpClient>();
  private activeRequests = new Map<string, number>(); // host -> count
  private requestQueue: QueuedRequest[] = [];
  private stats = {
    totalConnections: 0,
    successfulRequests: 0,
    failedRequests: 0,
    totalResponseTime: 0,
    requestCount: 0,
  };
  private isProcessingQueue = false;
  private queueProcessor?: NodeJS.Timeout;
  constructor(private config: ConnectionPoolConfig) {
    super();
    this.startQueueProcessor();
  }
  /**
   * Get or create a client for a host
   */
  private getClient(host: string): BunHttpClient {
    if (!this.clients.has(host)) {
      // NOTE(review): the scheme is hard-coded to https, so plain-http
      // hosts cannot be pooled — confirm this is intended.
      const client = new BunHttpClient({
        baseURL: `https://${host}`,
        timeout: this.config.requestTimeout,
        retries: this.config.retryAttempts,
        retryDelay: this.config.retryDelay,
        keepAlive: this.config.keepAlive,
        headers: {
          'User-Agent': 'StockBot-MarketDataGateway/1.0',
          'Accept': 'application/json',
        },
        // 5xx responses are treated as errors; 4xx resolve normally.
        validateStatus: (status: number) => status < 500,
      });
      // Listen for events from the client
      client.on('response', (data) => {
        const responseTime = data.response.timing.duration;
        this.updateStats(true, responseTime);
        this.emit('response', {
          host,
          responseTime,
          status: data.response.status
        });
      });
      client.on('error', (data) => {
        // Derive response time from the request's start timestamp when available.
        const responseTime = data.error?.config?.metadata?.startTime
          ? Date.now() - data.error.config.metadata.startTime
          : 0;
        this.updateStats(false, responseTime);
        this.emit('error', {
          host,
          error: data.error.message,
          responseTime
        });
      });
      this.clients.set(host, client);
      this.activeRequests.set(host, 0);
      this.stats.totalConnections++;
      this.emit('connectionCreated', host);
    }
    return this.clients.get(host)!;
  }
  /**
   * Make an HTTP request with connection pooling
   */
  async request(config: RequestConfig): Promise<any> {
    // Enqueue and kick the drain loop; the promise settles when the
    // request is eventually executed (or the pool is closed).
    return new Promise((resolve, reject) => {
      const requestId = this.generateRequestId();
      const queuedRequest: QueuedRequest = {
        id: requestId,
        config,
        resolve,
        reject,
        timestamp: Date.now(),
        retryCount: 0,
      };
      this.requestQueue.push(queuedRequest);
      this.processQueue();
    });
  }
  /**
   * Process the request queue
   */
  private async processQueue(): Promise<void> {
    // Single-drainer guard: overlapping calls return immediately.
    if (this.isProcessingQueue || this.requestQueue.length === 0) {
      return;
    }
    this.isProcessingQueue = true;
    while (this.requestQueue.length > 0) {
      const request = this.requestQueue.shift()!;
      try {
        const host = this.extractHost(request.config.url || '');
        const currentConnections = this.activeRequests.get(host) || 0;
        // Check connection limits
        if (currentConnections >= this.config.maxConnectionsPerHost) {
          // Put request back in queue
          // (it waits for the next drain — triggered by the 100ms timer)
          this.requestQueue.unshift(request);
          break;
        }
        // Check global connection limit
        const totalActive = Array.from(this.activeRequests.values()).reduce((sum, count) => sum + count, 0);
        if (totalActive >= this.config.maxConnections) {
          this.requestQueue.unshift(request);
          break;
        }
        // Execute the request
        // (deliberately not awaited so queued requests run concurrently)
        this.executeRequest(request, host);
      } catch (error) {
        request.reject(error);
      }
    }
    this.isProcessingQueue = false;
  }
  /**
   * Execute a single request
   */
  private async executeRequest(request: QueuedRequest, host: string): Promise<void> {
    const client = this.getClient(host);
    // Increment active connections
    this.activeRequests.set(host, (this.activeRequests.get(host) || 0) + 1);
    try {
      // Add metadata to track timing
      if (!request.config.metadata) {
        request.config.metadata = {};
      }
      request.config.metadata.startTime = Date.now();
      // Execute request using our client
      const response = await client.request(request.config);
      // Callers receive only the response body, not the full envelope.
      request.resolve(response.data);
    } catch (error: any) {
      // No need to handle retries explicitly as the BunHttpClient handles them internally
      request.reject(error);
      // Emit retry event for monitoring
      if (error.retryCount) {
        this.emit('retry', {
          requestId: request.id,
          retryCount: error.retryCount,
          error
        });
      }
    } finally {
      // Decrement active connections
      this.activeRequests.set(host, Math.max(0, (this.activeRequests.get(host) || 0) - 1));
    }
  }
  /**
   * Extract host from URL
   */
  private extractHost(url: string): string {
    try {
      const urlObj = new URL(url);
      return urlObj.host;
    } catch {
      // Relative or malformed URLs all share the sentinel 'default' host
      // (which getClient maps to baseURL https://default).
      return 'default';
    }
  }
  /**
   * Generate unique request ID
   */
  private generateRequestId(): string {
    // NOTE(review): String.prototype.substr is deprecated — prefer slice(2, 11).
    return `req_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
  }
  /**
   * Update statistics
   */
  private updateStats(success: boolean, responseTime: number): void {
    this.stats.requestCount++;
    this.stats.totalResponseTime += responseTime;
    if (success) {
      this.stats.successfulRequests++;
    } else {
      this.stats.failedRequests++;
    }
  }
  /**
   * Get connection pool statistics
   */
  getStats(): ConnectionStats {
    const totalActive = Array.from(this.activeRequests.values()).reduce((sum, count) => sum + count, 0);
    const averageResponseTime = this.stats.requestCount > 0
      ? this.stats.totalResponseTime / this.stats.requestCount
      : 0;
    const utilization = this.config.maxConnections > 0
      ? totalActive / this.config.maxConnections
      : 0;
    // Combine our stats with the stats from all clients
    // NOTE(review): pool counters are already updated from client events,
    // so adding per-client counters here may double-count — verify.
    const clientStats = Array.from(this.clients.values()).map(client => client.getStats());
    let successfulRequests = this.stats.successfulRequests;
    let failedRequests = this.stats.failedRequests;
    for (const stats of clientStats) {
      successfulRequests += stats.successfulRequests;
      failedRequests += stats.failedRequests;
    }
    return {
      activeConnections: totalActive,
      totalConnections: this.stats.totalConnections,
      successfulRequests,
      failedRequests,
      averageResponseTime,
      connectionPoolUtilization: utilization,
      requestsPerSecond: 0 // Will be calculated by the http-client
    };
  }
  /**
   * Start queue processor timer
   */
  private startQueueProcessor(): void {
    this.queueProcessor = setInterval(() => {
      this.processQueue();
    }, 100); // Process queue every 100ms
  }
  /**
   * Close all connections and clean up
   */
  async close(): Promise<void> {
    // Stop the queue processor
    if (this.queueProcessor) {
      clearInterval(this.queueProcessor);
    }
    // Wait for pending requests to complete (with timeout)
    const timeout = 30000; // 30 seconds
    const startTime = Date.now();
    while (this.requestQueue.length > 0 && Date.now() - startTime < timeout) {
      await new Promise(resolve => setTimeout(resolve, 100));
    }
    // Clear remaining requests
    while (this.requestQueue.length > 0) {
      const request = this.requestQueue.shift()!;
      request.reject(new Error('Connection pool closing'));
    }
    // Close all clients
    const closePromises = Array.from(this.clients.values()).map(client => client.close());
    await Promise.all(closePromises);
    // Clear clients and requests
    this.clients.clear();
    this.activeRequests.clear();
    this.emit('closed');
  }
  /**
   * Health check for the connection pool
   */
  async healthCheck(): Promise<{ healthy: boolean; details: any }> {
    const stats = this.getStats();
    const queueSize = this.requestQueue.length;
    // Check health of all clients
    const clientHealthChecks = await Promise.all(
      Array.from(this.clients.entries()).map(async ([host, client]) => {
        const health = await client.healthCheck();
        return {
          host,
          healthy: health.healthy,
          details: health.details
        };
      })
    );
    const healthy =
      stats.connectionPoolUtilization < 0.9 && // Less than 90% utilization
      queueSize < 100 && // Queue not too large
      stats.averageResponseTime < 5000 && // Average response time under 5 seconds
      clientHealthChecks.every(check => check.healthy); // All clients healthy
    return {
      healthy,
      details: {
        stats,
        queueSize,
        clients: clientHealthChecks,
        connections: Array.from(this.clients.keys()),
      },
    };
  }
}

View file

@ -1,396 +0,0 @@
import { dataProviderConfigs, DataProviderConfig } from '../config/DataProviderConfig';
// Define local types for market data
// Normalized quote shape shared by every provider adapter below.
interface MarketDataType {
  symbol: string;
  price: number; // last traded/regular market price
  bid: number; // may be approximated (price - 0.01) for providers without bid data
  ask: number; // may be approximated (price + 0.01) for providers without ask data
  volume: number;
  timestamp: Date;
}
// Normalized candle (open/high/low/close/volume) shape.
interface OHLCVType {
  symbol: string;
  timestamp: Date;
  open: number;
  high: number;
  low: number;
  close: number;
  volume: number;
}
// Result envelope returned by every normalize* entry point.
export interface DataNormalizationResult<T> {
  success: boolean;
  data?: T; // present only when success is true
  error?: string; // present only when success is false
  source: string; // provider identifier the caller passed in
  timestamp: Date; // when normalization finished
  processingTimeMs: number; // wall-clock duration of the normalization
}
// Per-dimension quality scores produced by assessDataQuality.
export interface DataQualityMetrics {
  completeness: number; // 0-1
  accuracy: number; // 0-1
  timeliness: number; // 0-1
  consistency: number; // 0-1
  overall: number; // 0-1 — arithmetic mean of the four dimensions
}
/**
 * Converts provider-specific payloads (Alpha Vantage, Yahoo Finance,
 * Polygon) into the local MarketDataType / OHLCVType shapes, validates
 * the results, scores data quality, and sanitizes records.
 */
export class DataNormalizer {
  // Loaded at construction; not otherwise read in this file — presumably
  // used by callers or future provider-specific logic. TODO confirm.
  private readonly providerConfigs: Record<string, DataProviderConfig>;
  constructor() {
    this.providerConfigs = dataProviderConfigs;
  }
  /**
   * Normalize market data from different providers to our standard format
   */
  normalizeMarketData(rawData: any, source: string): DataNormalizationResult<MarketDataType> {
    const startTime = Date.now();
    try {
      let normalizedData: MarketDataType;
      switch (source.toLowerCase()) {
        case 'alpha-vantage':
          normalizedData = this.normalizeAlphaVantage(rawData);
          break;
        case 'yahoo-finance':
          normalizedData = this.normalizeYahooFinance(rawData);
          break;
        case 'polygon':
          normalizedData = this.normalizePolygon(rawData);
          break;
        default:
          return {
            success: false,
            error: `Unsupported data source: ${source}`,
            source,
            timestamp: new Date(),
            processingTimeMs: Date.now() - startTime,
          };
      }
      // Validate the normalized data
      if (!this.validateMarketData(normalizedData)) {
        return {
          success: false,
          error: 'Data validation failed',
          source,
          timestamp: new Date(),
          processingTimeMs: Date.now() - startTime,
        };
      }
      return {
        success: true,
        data: normalizedData,
        source,
        timestamp: new Date(),
        processingTimeMs: Date.now() - startTime,
      };
    } catch (error) {
      // Malformed provider payloads land here (missing fields, bad shapes).
      return {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
        source,
        timestamp: new Date(),
        processingTimeMs: Date.now() - startTime,
      };
    }
  }
  /**
   * Normalize OHLCV data from different providers
   */
  normalizeOHLCV(rawData: any, source: string): DataNormalizationResult<OHLCVType[]> {
    const startTime = Date.now();
    try {
      let normalizedData: OHLCVType[];
      switch (source.toLowerCase()) {
        case 'alpha-vantage':
          normalizedData = this.normalizeAlphaVantageOHLCV(rawData);
          break;
        case 'yahoo-finance':
          normalizedData = this.normalizeYahooFinanceOHLCV(rawData);
          break;
        case 'polygon':
          normalizedData = this.normalizePolygonOHLCV(rawData);
          break;
        default:
          return {
            success: false,
            error: `Unsupported data source: ${source}`,
            source,
            timestamp: new Date(),
            processingTimeMs: Date.now() - startTime,
          };
      }
      // Validate each OHLCV entry
      // (invalid rows are dropped rather than failing the whole batch)
      const validData = normalizedData.filter(item => this.validateOHLCV(item));
      if (validData.length === 0) {
        return {
          success: false,
          error: 'No valid OHLCV data after normalization',
          source,
          timestamp: new Date(),
          processingTimeMs: Date.now() - startTime,
        };
      }
      return {
        success: true,
        data: validData,
        source,
        timestamp: new Date(),
        processingTimeMs: Date.now() - startTime,
      };
    } catch (error) {
      return {
        success: false,
        error: error instanceof Error ? error.message : 'Unknown error',
        source,
        timestamp: new Date(),
        processingTimeMs: Date.now() - startTime,
      };
    }
  }
  // Maps Alpha Vantage 'Global Quote' responses; bid/ask are approximated.
  private normalizeAlphaVantage(data: any): MarketDataType {
    const quote = data['Global Quote'];
    return {
      symbol: quote['01. symbol'],
      price: parseFloat(quote['05. price']),
      bid: parseFloat(quote['05. price']) - 0.01, // Approximate bid/ask
      ask: parseFloat(quote['05. price']) + 0.01,
      volume: parseInt(quote['06. volume']),
      timestamp: new Date(),
    };
  }
  // Maps Yahoo Finance quote objects; falls back to ±0.01 around the
  // regular market price when bid/ask are absent.
  private normalizeYahooFinance(data: any): MarketDataType {
    return {
      symbol: data.symbol,
      price: data.regularMarketPrice,
      bid: data.bid || data.regularMarketPrice - 0.01,
      ask: data.ask || data.regularMarketPrice + 0.01,
      volume: data.regularMarketVolume,
      timestamp: new Date(data.regularMarketTime * 1000), // epoch seconds -> Date
    };
  }
  private normalizePolygon(data: any): MarketDataType {
    // Polygon.io format normalization
    // (accepts both the terse single-letter fields and long-form aliases)
    return {
      symbol: data.T || data.symbol,
      price: data.c || data.price,
      bid: data.b || data.bid,
      ask: data.a || data.ask,
      volume: data.v || data.volume,
      timestamp: new Date(data.t || data.timestamp),
    };
  }
  // NOTE(review): this sorts newest-first (b - a), while the Yahoo and
  // Polygon OHLCV normalizers keep provider (ascending) order — confirm
  // callers tolerate the inconsistency.
  private normalizeAlphaVantageOHLCV(data: any): OHLCVType[] {
    const timeSeries = data['Time Series (1min)'] || data['Time Series (5min)'] || data['Time Series (Daily)'];
    const symbol = data['Meta Data']['2. Symbol'];
    return Object.entries(timeSeries).map(([timestamp, values]: [string, any]) => ({
      symbol,
      timestamp: new Date(timestamp),
      open: parseFloat(values['1. open']),
      high: parseFloat(values['2. high']),
      low: parseFloat(values['3. low']),
      close: parseFloat(values['4. close']),
      volume: parseInt(values['5. volume']),
    })).sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());
  }
  // Maps Yahoo's chart API: parallel timestamp/quote arrays are zipped.
  private normalizeYahooFinanceOHLCV(data: any): OHLCVType[] {
    const result = data.chart.result[0];
    const timestamps = result.timestamp;
    const quotes = result.indicators.quote[0];
    return timestamps.map((timestamp: number, index: number) => ({
      symbol: result.meta.symbol,
      timestamp: new Date(timestamp * 1000),
      open: quotes.open[index],
      high: quotes.high[index],
      low: quotes.low[index],
      close: quotes.close[index],
      volume: quotes.volume[index],
    }));
  }
  private normalizePolygonOHLCV(data: any): OHLCVType[] {
    // Polygon.io aggregates format
    if (data.results && Array.isArray(data.results)) {
      return data.results.map((candle: any) => ({
        symbol: data.ticker || candle.T,
        timestamp: new Date(candle.t),
        open: candle.o,
        high: candle.h,
        low: candle.l,
        close: candle.c,
        volume: candle.v,
      }));
    }
    // Unexpected payload shape -> empty batch (normalizeOHLCV reports failure).
    return [];
  }
  /**
   * Validate market data quality
   * NOTE(review): the expression starts with `data.symbol &&`, so it can
   * evaluate to '' (a string); the `as boolean` cast hides that — prefer
   * Boolean(...) if this is ever used where a strict boolean matters.
   */
  validateMarketData(data: MarketDataType): boolean {
    return (
      data.symbol &&
      typeof data.symbol === 'string' &&
      data.symbol.length > 0 &&
      typeof data.price === 'number' &&
      data.price > 0 &&
      typeof data.volume === 'number' &&
      data.volume >= 0 &&
      data.timestamp instanceof Date &&
      !isNaN(data.timestamp.getTime()) &&
      typeof data.bid === 'number' &&
      typeof data.ask === 'number' &&
      data.ask >= data.bid
    ) as boolean;
  }
  /**
   * Validate OHLCV data quality
   * (same `as boolean` truthiness caveat as validateMarketData)
   */
  validateOHLCV(data: OHLCVType): boolean {
    return (
      data.symbol &&
      typeof data.symbol === 'string' &&
      data.symbol.length > 0 &&
      typeof data.open === 'number' && data.open > 0 &&
      typeof data.high === 'number' && data.high > 0 &&
      typeof data.low === 'number' && data.low > 0 &&
      typeof data.close === 'number' && data.close > 0 &&
      data.high >= Math.max(data.open, data.close) &&
      data.low <= Math.min(data.open, data.close) &&
      typeof data.volume === 'number' && data.volume >= 0 &&
      data.timestamp instanceof Date &&
      !isNaN(data.timestamp.getTime())
    ) as boolean;
  }
  /**
   * Assess data quality metrics for market data
   */
  assessDataQuality(data: MarketDataType[], source: string): DataQualityMetrics {
    if (data.length === 0) {
      return {
        completeness: 0,
        accuracy: 0,
        timeliness: 0,
        consistency: 0,
        overall: 0,
      };
    }
    // Completeness: percentage of valid data points
    const validCount = data.filter(item => this.validateMarketData(item)).length;
    const completeness = validCount / data.length;
    // Accuracy: based on price consistency and reasonable ranges
    const accuracyScore = this.assessAccuracy(data);
    // Timeliness: based on data freshness
    const timelinessScore = this.assessTimeliness(data);
    // Consistency: based on data patterns and outliers
    const consistencyScore = this.assessConsistency(data);
    const overall = (completeness + accuracyScore + timelinessScore + consistencyScore) / 4;
    return {
      completeness,
      accuracy: accuracyScore,
      timeliness: timelinessScore,
      consistency: consistencyScore,
      overall,
    };
  }
  // Mean per-item plausibility score: penalizes extreme prices, wide
  // bid/ask spreads and negative volume.
  private assessAccuracy(data: MarketDataType[]): number {
    let accuracySum = 0;
    for (const item of data) {
      let score = 1.0;
      // Check for reasonable price ranges
      if (item.price <= 0 || item.price > 100000) score -= 0.3;
      // Check bid/ask spread reasonableness
      const spread = item.ask - item.bid;
      const spreadPercentage = spread / item.price;
      if (spreadPercentage > 0.1) score -= 0.2; // More than 10% spread is suspicious
      // Check for negative volume
      if (item.volume < 0) score -= 0.5;
      accuracySum += Math.max(0, score);
    }
    return data.length > 0 ? accuracySum / data.length : 0;
  }
  // Mean freshness score: full credit under 5 minutes old, stepped
  // penalties at 5/15/60 minutes.
  private assessTimeliness(data: MarketDataType[]): number {
    const now = new Date();
    let timelinessSum = 0;
    for (const item of data) {
      const ageMs = now.getTime() - item.timestamp.getTime();
      const ageMinutes = ageMs / (1000 * 60);
      // Score based on data age (fresher is better)
      let score = 1.0;
      if (ageMinutes > 60) score = 0.1; // Very old data
      else if (ageMinutes > 15) score = 0.5; // Moderately old
      else if (ageMinutes > 5) score = 0.8; // Slightly old
      timelinessSum += score;
    }
    return data.length > 0 ? timelinessSum / data.length : 0;
  }
  // Mean plausibility of consecutive price moves (time-ordered);
  // a single data point is trivially consistent.
  private assessConsistency(data: MarketDataType[]): number {
    if (data.length < 2) return 1.0;
    // Sort by timestamp
    const sortedData = [...data].sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
    let consistencySum = 0;
    for (let i = 1; i < sortedData.length; i++) {
      const current = sortedData[i];
      const previous = sortedData[i - 1];
      // Check for reasonable price movements
      const priceChange = Math.abs(current.price - previous.price) / previous.price;
      let score = 1.0;
      if (priceChange > 0.5) score -= 0.7; // More than 50% change is suspicious
      else if (priceChange > 0.1) score -= 0.3; // More than 10% change is notable
      consistencySum += Math.max(0, score);
    }
    return consistencySum / (sortedData.length - 1);
  }
  /**
   * Clean and sanitize market data
   * (coerces to numbers, clamps negatives to 0, uppercases the symbol;
   * returns a fresh object — the input is not mutated)
   */
  sanitizeMarketData(data: MarketDataType): MarketDataType {
    return {
      symbol: data.symbol.toUpperCase().trim(),
      price: Math.max(0, Number(data.price) || 0),
      bid: Math.max(0, Number(data.bid) || 0),
      ask: Math.max(0, Number(data.ask) || 0),
      volume: Math.max(0, Math.floor(Number(data.volume) || 0)),
      timestamp: new Date(data.timestamp),
    };
  }
}

View file

@ -1,598 +0,0 @@
import { EventEmitter } from 'eventemitter3';
// Local logger interface to avoid pino dependency issues
// Minimal logging surface: message string first, then arbitrary rest args.
interface Logger {
  info(msg: string, ...args: any[]): void;
  error(msg: string, ...args: any[]): void;
  warn(msg: string, ...args: any[]): void;
  debug(msg: string, ...args: any[]): void;
  child(options: any): Logger; // derive a namespaced sub-logger
}
// Console-backed Logger factory; child loggers append a dot-separated
// component name (falling back to 'child' when none is given).
const createLogger = (name: string): Logger => {
  const prefix = (level: string) => `[${name}] ${level}:`;
  return {
    info: (msg: string, ...args: any[]) => console.log(prefix('INFO'), msg, ...args),
    error: (msg: string, ...args: any[]) => console.error(prefix('ERROR'), msg, ...args),
    warn: (msg: string, ...args: any[]) => console.warn(prefix('WARN'), msg, ...args),
    debug: (msg: string, ...args: any[]) => console.debug(prefix('DEBUG'), msg, ...args),
    child: (options: any) => createLogger(`${name}.${options.component || 'child'}`)
  };
};
import WebSocket from 'ws';
// Simple HTTP client to replace axios
// Minimal GET/POST surface returning axios-like `{ data }` envelopes.
interface HttpClient {
  get(url: string): Promise<{ data: any }>;
  post(url: string, data?: any): Promise<{ data: any }>;
}
// fetch-backed HttpClient; `url` is appended to `baseURL` verbatim.
// NOTE(review): responses are parsed as JSON unconditionally and non-2xx
// statuses are not rejected (fetch only throws on network errors) —
// confirm callers expect that behavior.
const createHttpClient = (baseURL: string, headers?: Record<string, string>): HttpClient => ({
  get: async (url: string) => {
    const response = await fetch(`${baseURL}${url}`, { headers });
    return { data: await response.json() };
  },
  post: async (url: string, data?: any) => {
    const response = await fetch(`${baseURL}${url}`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json', ...headers },
      body: data ? JSON.stringify(data) : undefined
    });
    return { data: await response.json() };
  }
});
import {
DataSourceConfig,
DataSourceMetrics,
DataSourceError,
MarketDataTick,
MarketDataCandle,
MarketDataTrade
} from '../types/MarketDataGateway';
// Runtime state tracked per configured data source.
interface DataSourceConnection {
  config: DataSourceConfig;
  connection?: WebSocket | HttpClient; // present once a transport exists
  status: 'disconnected' | 'connecting' | 'connected' | 'error';
  lastConnectedAt?: Date; // set when a connection succeeds
  lastErrorAt?: Date;
  retryCount: number; // reconnect attempts so far
  metrics: {
    messagesReceived: number;
    bytesReceived: number;
    errors: number;
    latencyMs: number[]; // raw latency samples used for percentile stats
  };
}
export class DataSourceManager extends EventEmitter {
private dataSources: Map<string, DataSourceConnection> = new Map();
private logger: Logger;
private healthCheckInterval?: NodeJS.Timeout;
private reconnectTimeouts: Map<string, NodeJS.Timeout> = new Map();
  /**
   * @param configs Initial data source configurations to register
   *                (sources start disconnected; call start() to connect).
   * @param logger Destination for lifecycle logging.
   */
  constructor(configs: DataSourceConfig[], logger: Logger) {
    super();
    this.logger = logger;
    this.initializeDataSources(configs);
  }
private initializeDataSources(configs: DataSourceConfig[]) {
for (const config of configs) {
this.dataSources.set(config.id, {
config,
status: 'disconnected',
retryCount: 0,
metrics: {
messagesReceived: 0,
bytesReceived: 0,
errors: 0,
latencyMs: []
}
});
}
}
public async start(): Promise<void> {
this.logger.info('Starting Data Source Manager');
// Connect to all enabled data sources
const connectionPromises = Array.from(this.dataSources.values())
.filter(ds => ds.config.enabled)
.map(ds => this.connectDataSource(ds.config.id));
await Promise.allSettled(connectionPromises);
// Start health check interval
this.startHealthCheck();
this.logger.info('Data Source Manager started');
}
public async stop(): Promise<void> {
this.logger.info('Stopping Data Source Manager');
// Clear health check interval
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
}
// Clear all reconnect timeouts
for (const timeout of this.reconnectTimeouts.values()) {
clearTimeout(timeout);
}
this.reconnectTimeouts.clear();
// Disconnect all data sources
const disconnectionPromises = Array.from(this.dataSources.keys())
.map(sourceId => this.disconnectDataSource(sourceId));
await Promise.allSettled(disconnectionPromises);
this.logger.info('Data Source Manager stopped');
}
public async addDataSource(config: DataSourceConfig): Promise<void> {
this.logger.info({ sourceId: config.id }, 'Adding data source');
this.dataSources.set(config.id, {
config,
status: 'disconnected',
retryCount: 0,
metrics: {
messagesReceived: 0,
bytesReceived: 0,
errors: 0,
latencyMs: []
}
});
if (config.enabled) {
await this.connectDataSource(config.id);
}
}
public async removeDataSource(sourceId: string): Promise<void> {
this.logger.info(`Removing data source: ${sourceId}`);
await this.disconnectDataSource(sourceId);
this.dataSources.delete(sourceId);
const timeout = this.reconnectTimeouts.get(sourceId);
if (timeout) {
clearTimeout(timeout);
this.reconnectTimeouts.delete(sourceId);
}
}
/**
 * Merge a partial configuration into an existing data source and, if it
 * is currently connected, bounce the connection so the new settings take
 * effect.
 *
 * @param sourceId - id of a previously added data source
 * @param updates  - partial config merged over the current one
 * @throws Error when no source with `sourceId` is registered
 */
public async updateDataSource(sourceId: string, updates: Partial<DataSourceConfig>): Promise<void> {
  const dataSource = this.dataSources.get(sourceId);
  if (!dataSource) {
    throw new Error(`Data source ${sourceId} not found`);
  }
  // Object-first structured logging, consistent with the rest of this class
  // (the old call passed `updates` as a trailing positional argument).
  this.logger.info({ sourceId, updates }, 'Updating data source');
  dataSource.config = { ...dataSource.config, ...updates };
  // Only bounce a live connection; a disconnected source that becomes
  // enabled is picked up by the health-check loop.
  if (dataSource.status === 'connected') {
    await this.disconnectDataSource(sourceId);
    if (dataSource.config.enabled) {
      await this.connectDataSource(sourceId);
    }
  }
}
/** List the configuration of every registered data source. */
public getDataSources(): DataSourceConfig[] {
  const configs: DataSourceConfig[] = [];
  for (const source of this.dataSources.values()) {
    configs.push(source.config);
  }
  return configs;
}
/**
 * Build a metrics snapshot for one source (when `sourceId` is given) or
 * for every registered source. An unknown `sourceId` yields [].
 *
 * Notes on the reported numbers:
 *  - messages.processed mirrors messages.received (no drop tracking here);
 *  - bandwidth.inboundBytesPerSecond is a rough total-bytes/60 estimate.
 */
public getDataSourceMetrics(sourceId?: string): DataSourceMetrics[] {
  // Resolve the target list with real narrowing instead of the previous
  // `.filter(Boolean)` + scattered non-null assertions (`ds!`).
  let sources: DataSourceConnection[];
  if (sourceId) {
    const match = this.dataSources.get(sourceId);
    sources = match ? [match] : [];
  } else {
    sources = Array.from(this.dataSources.values());
  }
  return sources.map((ds) => ({
    sourceId: ds.config.id,
    timestamp: new Date(),
    connections: {
      active: ds.status === 'connected' ? 1 : 0,
      total: 1,
      failed: ds.metrics.errors
    },
    messages: {
      received: ds.metrics.messagesReceived,
      processed: ds.metrics.messagesReceived, // no separate counter; assume all received were processed
      errors: ds.metrics.errors,
      dropped: 0
    },
    latency: {
      avgMs: this.calculateAverageLatency(ds.metrics.latencyMs),
      p50Ms: this.calculatePercentile(ds.metrics.latencyMs, 0.5),
      p95Ms: this.calculatePercentile(ds.metrics.latencyMs, 0.95),
      p99Ms: this.calculatePercentile(ds.metrics.latencyMs, 0.99)
    },
    bandwidth: {
      inboundBytesPerSecond: ds.metrics.bytesReceived / 60, // rough estimate over an assumed 60s window
      outboundBytesPerSecond: 0
    }
  }));
}
/**
 * Establish a connection for one source by delegating to the transport-
 * specific connector. On success the retry counter resets and a
 * 'connected' event fires; on failure the error is recorded, an 'error'
 * event fires, and a reconnect is scheduled.
 *
 * @throws Error when the source id is unknown
 */
private async connectDataSource(sourceId: string): Promise<void> {
  const dataSource = this.dataSources.get(sourceId);
  if (!dataSource) {
    throw new Error(`Data source ${sourceId} not found`);
  }
  // Idempotent: a connect already completed or in flight is a no-op.
  if (dataSource.status === 'connected' || dataSource.status === 'connecting') {
    return;
  }
  this.logger.info({ sourceId }, 'Connecting to data source');
  dataSource.status = 'connecting';
  try {
    switch (dataSource.config.type) {
      case 'websocket':
        await this.connectWebSocket(dataSource);
        break;
      case 'rest':
        await this.connectREST(dataSource);
        break;
      default:
        throw new Error(`Unsupported data source type: ${dataSource.config.type}`);
    }
    dataSource.status = 'connected';
    dataSource.lastConnectedAt = new Date();
    dataSource.retryCount = 0;
    this.logger.info({ sourceId }, 'Data source connected');
    this.emit('connected', sourceId);
  } catch (error) {
    this.logger.error({ sourceId, error }, 'Failed to connect to data source');
    dataSource.status = 'error';
    dataSource.lastErrorAt = new Date();
    dataSource.metrics.errors++;
    this.emit('error', sourceId, error);
    this.scheduleReconnect(sourceId);
  }
}
/**
 * Open a WebSocket to the source and resolve once the socket is open and
 * subscription frames have been sent. Rejects on a 10s connect timeout or
 * on a socket error that arrives before 'open'.
 *
 * NOTE(review): the 'close' handler always calls scheduleReconnect — this
 * also fires after an intentional disconnectDataSource() close while the
 * source is still enabled; confirm that is the intended behavior.
 */
private async connectWebSocket(dataSource: DataSourceConnection): Promise<void> {
  const { config } = dataSource;
  const ws = new WebSocket(config.connection.url, {
    headers: config.connection.headers
  });
  return new Promise((resolve, reject) => {
    // Guard against a socket that never reaches 'open'.
    const connectTimeout = setTimeout(() => {
      ws.close();
      reject(new Error('WebSocket connection timeout'));
    }, 10000);
    ws.on('open', () => {
      clearTimeout(connectTimeout);
      this.logger.debug({ sourceId: config.id }, 'WebSocket connected');
      // Send subscription messages
      this.sendWebSocketSubscriptions(ws, config);
      dataSource.connection = ws;
      resolve();
    });
    ws.on('message', (data: Buffer) => {
      // Capture receive time up front so parsing cost doesn't skew latency.
      const receiveTime = Date.now();
      this.handleWebSocketMessage(config.id, data, receiveTime);
    });
    ws.on('error', (error) => {
      clearTimeout(connectTimeout);
      this.logger.error({ sourceId: config.id, error }, 'WebSocket error');
      // If 'open' already resolved the promise, this reject is a no-op.
      reject(error);
    });
    ws.on('close', () => {
      this.logger.warn({ sourceId: config.id }, 'WebSocket disconnected');
      dataSource.status = 'disconnected';
      this.emit('disconnected', config.id);
      this.scheduleReconnect(config.id);
    });
  });
}
/**
 * Create an axios client for a REST source, verify it with a GET /health
 * probe, then start the polling loop.
 *
 * @throws Error when the health probe fails
 */
private async connectREST(dataSource: DataSourceConnection): Promise<void> {
  const { config } = dataSource;
  const axiosInstance = axios.create({
    baseURL: config.connection.url,
    headers: config.connection.headers,
    timeout: 5000,
    params: config.connection.queryParams
  });
  const auth = config.connection.authentication;
  if (auth) {
    this.configureAuthentication(axiosInstance, auth);
  }
  // Probe the endpoint before committing the connection.
  try {
    await axiosInstance.get('/health');
  } catch (error) {
    throw new Error(`REST API health check failed: ${error}`);
  }
  dataSource.connection = axiosInstance;
  this.startRESTPolling(config.id);
}
private sendWebSocketSubscriptions(ws: WebSocket, config: DataSourceConfig): void {
const subscriptions = [];
if (config.subscriptions.quotes) {
subscriptions.push({
type: 'subscribe',
channel: 'quotes',
symbols: config.symbols
});
}
if (config.subscriptions.trades) {
subscriptions.push({
type: 'subscribe',
channel: 'trades',
symbols: config.symbols
});
}
if (config.subscriptions.orderbook) {
subscriptions.push({
type: 'subscribe',
channel: 'orderbook',
symbols: config.symbols
});
}
if (config.subscriptions.candles) {
subscriptions.push({
type: 'subscribe',
channel: 'candles',
symbols: config.symbols
});
}
for (const subscription of subscriptions) {
ws.send(JSON.stringify(subscription));
}
}
/**
 * Parse one raw WebSocket frame, update per-source counters (messages,
 * bytes, latency), and emit the normalized result on 'data'. Malformed
 * frames are counted as errors and dropped.
 */
private handleWebSocketMessage(sourceId: string, data: Buffer, receiveTime: number): void {
  const dataSource = this.dataSources.get(sourceId);
  if (!dataSource) return;
  try {
    const message = JSON.parse(data.toString());
    const { metrics } = dataSource;
    metrics.messagesReceived++;
    metrics.bytesReceived += data.length;
    // Track latency only when the payload carries its own timestamp.
    if (message.timestamp) {
      metrics.latencyMs.push(receiveTime - message.timestamp);
      // Bound the rolling window to the most recent 1000 samples.
      if (metrics.latencyMs.length > 1000) {
        metrics.latencyMs = metrics.latencyMs.slice(-1000);
      }
    }
    const normalized = this.normalizeMessage(message, sourceId);
    if (normalized) {
      this.emit('data', sourceId, normalized);
    }
  } catch (error) {
    this.logger.error({ sourceId, error }, 'Error parsing WebSocket message');
    dataSource.metrics.errors++;
  }
}
/**
 * Poll a REST source's /market-data endpoint once per second, normalizing
 * and emitting each response on 'data'. The loop reschedules itself only
 * while the source stays 'connected', so disconnecting stops it.
 */
private startRESTPolling(sourceId: string): void {
  const dataSource = this.dataSources.get(sourceId);
  if (!dataSource || !dataSource.connection) return;
  const pollInterval = 1000; // 1 second polling
  const poll = async (): Promise<void> => {
    try {
      const client = dataSource.connection as AxiosInstance;
      const response = await client.get('/market-data');
      dataSource.metrics.messagesReceived++;
      dataSource.metrics.bytesReceived += JSON.stringify(response.data).length;
      const normalized = this.normalizeMessage(response.data, sourceId);
      if (normalized) {
        this.emit('data', sourceId, normalized);
      }
    } catch (error) {
      this.logger.error({ sourceId, error }, 'REST polling error');
      dataSource.metrics.errors++;
    }
    // Re-arm only while still connected.
    if (dataSource.status === 'connected') {
      setTimeout(poll, pollInterval);
    }
  };
  void poll();
}
/**
 * Convert a provider-specific payload into one of the gateway's
 * normalized shapes (tick, trade, or candle), or null when the payload is
 * not recognized or cannot be normalized.
 *
 * Fix: fallbacks use nullish coalescing (??) instead of ||, so legitimate
 * zero values (price 0, volume 0, timestamp 0) are no longer discarded in
 * favor of the short-form alias or a default.
 *
 * NOTE: this is a simplified normalizer — real deployments would plug in
 * a dedicated normalizer per data-source wire format.
 */
private normalizeMessage(message: any, sourceId: string): MarketDataTick | MarketDataCandle | MarketDataTrade | null {
  try {
    if (message.type === 'quote' || message.price !== undefined) {
      return {
        symbol: message.symbol ?? message.s,
        timestamp: message.timestamp ?? message.t ?? Date.now(),
        price: message.price ?? message.p,
        volume: message.volume ?? message.v ?? 0,
        bid: message.bid ?? message.b,
        ask: message.ask ?? message.a,
        source: sourceId
      } as MarketDataTick;
    }
    if (message.type === 'trade') {
      return {
        id: message.id ?? `${sourceId}-${Date.now()}`,
        symbol: message.symbol ?? message.s,
        timestamp: message.timestamp ?? message.t ?? Date.now(),
        price: message.price ?? message.p,
        size: message.size ?? message.q,
        side: message.side ?? 'buy',
        source: sourceId
      } as MarketDataTrade;
    }
    if (message.type === 'candle' || message.ohlc) {
      return {
        symbol: message.symbol ?? message.s,
        timestamp: message.timestamp ?? message.t ?? Date.now(),
        open: message.open ?? message.o,
        high: message.high ?? message.h,
        low: message.low ?? message.l,
        close: message.close ?? message.c,
        volume: message.volume ?? message.v,
        timeframe: message.timeframe ?? '1m',
        source: sourceId
      } as MarketDataCandle;
    }
    return null;
  } catch (error) {
    this.logger.error({ error, message, sourceId }, 'Error normalizing message');
    return null;
  }
}
/**
 * Tear down one source's connection. WebSockets are closed explicitly;
 * REST polling stops on its own once the status leaves 'connected'.
 * No-op for unknown or already-disconnected sources.
 */
private async disconnectDataSource(sourceId: string): Promise<void> {
  const dataSource = this.dataSources.get(sourceId);
  if (!dataSource || dataSource.status === 'disconnected') {
    return;
  }
  this.logger.info({ sourceId }, 'Disconnecting data source');
  const { connection } = dataSource;
  if (connection instanceof WebSocket) {
    connection.close();
  }
  dataSource.status = 'disconnected';
  dataSource.connection = undefined;
}
/**
 * Arm a one-shot reconnect timer with exponential backoff
 * (backoffMultiplier * 2^retryCount, capped at maxBackoffMs). Once
 * retryCount reaches the policy's maxRetries the source is marked
 * 'error' and no timer is armed. No-op for removed or disabled sources.
 */
private scheduleReconnect(sourceId: string): void {
  const dataSource = this.dataSources.get(sourceId);
  if (!dataSource || !dataSource.config.enabled) {
    return;
  }
  const { retryPolicy } = dataSource.config;
  if (dataSource.retryCount >= retryPolicy.maxRetries) {
    this.logger.error({ sourceId }, 'Max retries exceeded, giving up');
    dataSource.status = 'error';
    return;
  }
  const backoffMs = Math.min(
    retryPolicy.backoffMultiplier * Math.pow(2, dataSource.retryCount),
    retryPolicy.maxBackoffMs
  );
  this.logger.info({ sourceId, retryCount: dataSource.retryCount, backoffMs }, 'Scheduling reconnect');
  const timeout = setTimeout(() => {
    dataSource.retryCount++;
    void this.connectDataSource(sourceId);
    this.reconnectTimeouts.delete(sourceId);
  }, backoffMs);
  this.reconnectTimeouts.set(sourceId, timeout);
}
/**
 * Every 30 seconds, try to revive enabled sources that have fallen back
 * to 'disconnected' (e.g. after a remote socket close).
 */
private startHealthCheck(): void {
  const HEALTH_CHECK_MS = 30000;
  this.healthCheckInterval = setInterval(() => {
    for (const [sourceId, dataSource] of this.dataSources.entries()) {
      if (!dataSource.config.enabled || dataSource.status !== 'disconnected') {
        continue;
      }
      this.logger.debug({ sourceId }, 'Health check: attempting reconnect');
      void this.connectDataSource(sourceId);
    }
  }, HEALTH_CHECK_MS);
}
private configureAuthentication(axiosInstance: AxiosInstance, auth: any): void {
switch (auth.type) {
case 'apikey':
axiosInstance.defaults.headers.common['X-API-Key'] = auth.credentials.apiKey;
break;
case 'basic':
axiosInstance.defaults.auth = {
username: auth.credentials.username,
password: auth.credentials.password
};
break;
case 'jwt':
axiosInstance.defaults.headers.common['Authorization'] = `Bearer ${auth.credentials.token}`;
break;
}
}
/** Arithmetic mean of the latency samples; 0 for an empty window. */
private calculateAverageLatency(latencies: number[]): number {
  if (latencies.length === 0) return 0;
  let total = 0;
  for (const sample of latencies) {
    total += sample;
  }
  return total / latencies.length;
}
/**
 * Nearest-rank percentile (`percentile` in [0, 1]) over a sorted copy of
 * the samples; the input array is never mutated. Returns 0 when empty.
 */
private calculatePercentile(values: number[], percentile: number): number {
  if (values.length === 0) return 0;
  const sorted = values.slice().sort((a, b) => a - b);
  const rank = Math.max(0, Math.ceil(sorted.length * percentile) - 1);
  return sorted[rank];
}
/**
 * Reconcile the manager against a full desired-state list: sources
 * absent from `configs` are removed, existing ones are updated in place,
 * and new ones are added (connecting if enabled).
 */
public async updateConfig(configs: DataSourceConfig[]): Promise<void> {
  this.logger.info('Updating data source configurations');
  const desiredIds = new Set(configs.map((c) => c.id));
  const staleIds = Array.from(this.dataSources.keys()).filter((id) => !desiredIds.has(id));
  for (const sourceId of staleIds) {
    await this.removeDataSource(sourceId);
  }
  for (const config of configs) {
    if (this.dataSources.has(config.id)) {
      await this.updateDataSource(config.id, config);
    } else {
      await this.addDataSource(config);
    }
  }
}
}

View file

@ -1,140 +0,0 @@
import Redis from 'ioredis';
import { databaseConfig } from '@stock-bot/config';
import type { MarketDataEvent, SignalEvent, TradingEvent } from '@stock-bot/types';
/**
 * Publishes trading events to a Dragonfly (Redis-compatible) server.
 * Everything rides on one connection: events are appended to the single
 * 'trading-events' stream via XADD, market data can be cached with a
 * TTL, and channel publish / health keys support lightweight pub-sub and
 * liveness reporting.
 */
export class EventPublisher {
  private dragonfly: Redis;
  private readonly STREAM_NAME = 'trading-events';

  constructor() {
    this.dragonfly = new Redis({
      host: databaseConfig.dragonfly.host,
      port: databaseConfig.dragonfly.port,
      password: databaseConfig.dragonfly.password,
      maxRetriesPerRequest: databaseConfig.dragonfly.maxRetriesPerRequest,
    });
    this.dragonfly.on('connect', () => {
      console.log('🐉 Connected to Dragonfly for event publishing');
    });
    this.dragonfly.on('error', (error) => {
      console.error('❌ Dragonfly connection error:', error);
    });
  }

  /**
   * Append a market data event to the event stream.
   * Rethrows after logging so callers can react to publish failures.
   */
  async publishMarketData(event: MarketDataEvent): Promise<void> {
    try {
      const fields = [
        'type', event.type,
        'data', JSON.stringify(event.data),
        'timestamp', event.timestamp.toISOString(),
      ];
      await this.dragonfly.xadd(this.STREAM_NAME, '*', ...fields);
    } catch (error) {
      console.error('Error publishing market data event:', error);
      throw error;
    }
  }

  /**
   * Append a trading signal event to the event stream.
   * Rethrows after logging so callers can react to publish failures.
   */
  async publishSignal(event: SignalEvent): Promise<void> {
    try {
      const fields = [
        'type', event.type,
        'signal', JSON.stringify(event.signal),
        'timestamp', event.timestamp.toISOString(),
      ];
      await this.dragonfly.xadd(this.STREAM_NAME, '*', ...fields);
    } catch (error) {
      console.error('Error publishing signal event:', error);
      throw error;
    }
  }

  /**
   * Append any trading event to the stream, attaching whichever optional
   * payloads (data / order / signal) the event variant carries.
   */
  async publishEvent(event: TradingEvent): Promise<void> {
    try {
      const fields: string[] = ['type', event.type, 'timestamp', event.timestamp.toISOString()];
      if ('data' in event) fields.push('data', JSON.stringify(event.data));
      if ('order' in event) fields.push('order', JSON.stringify(event.order));
      if ('signal' in event) fields.push('signal', JSON.stringify(event.signal));
      await this.dragonfly.xadd(this.STREAM_NAME, '*', ...fields);
    } catch (error) {
      console.error('Error publishing event:', error);
      throw error;
    }
  }

  /**
   * Cache market data under `market-data:<symbol>` with a TTL (seconds).
   * Best-effort: failures are logged, not rethrown.
   */
  async cacheMarketData(symbol: string, data: any, ttl: number = 60): Promise<void> {
    try {
      await this.dragonfly.setex(`market-data:${symbol}`, ttl, JSON.stringify(data));
    } catch (error) {
      console.error('Error caching market data:', error);
    }
  }

  /**
   * Read cached market data for a symbol; null when absent or on error.
   */
  async getCachedMarketData(symbol: string): Promise<any | null> {
    try {
      const cached = await this.dragonfly.get(`market-data:${symbol}`);
      if (!cached) {
        return null;
      }
      return JSON.parse(cached);
    } catch (error) {
      console.error('Error getting cached market data:', error);
      return null;
    }
  }

  /**
   * Publish JSON-serialized data on a pub/sub channel for real-time
   * subscribers. Rethrows after logging.
   */
  async publishToChannel(channel: string, data: any): Promise<void> {
    try {
      await this.dragonfly.publish(channel, JSON.stringify(data));
    } catch (error) {
      console.error(`Error publishing to channel ${channel}:`, error);
      throw error;
    }
  }

  /**
   * Record a service's health under `health:<serviceName>` with a
   * 5-minute TTL, so stale entries expire on their own. Best-effort.
   */
  async setServiceHealth(serviceName: string, status: 'healthy' | 'unhealthy'): Promise<void> {
    try {
      const healthData = {
        status,
        timestamp: new Date().toISOString(),
        lastSeen: Date.now(),
      };
      await this.dragonfly.setex(`health:${serviceName}`, 300, JSON.stringify(healthData)); // 5 minutes TTL
    } catch (error) {
      console.error('Error setting service health:', error);
    }
  }

  /** Gracefully close the Dragonfly connection. */
  async disconnect(): Promise<void> {
    await this.dragonfly.quit();
  }
}

View file

@ -1,404 +0,0 @@
import { EventEmitter } from 'eventemitter3';
// Local logger interface to avoid pino dependency issues
// Local logger contract kept here to avoid pulling in pino and its
// typings; mirrors the subset of the pino API this service uses.
interface Logger {
  info(msg: string | object, ...args: any[]): void;
  error(msg: string | object, ...args: any[]): void;
  warn(msg: string | object, ...args: any[]): void;
  debug(msg: string | object, ...args: any[]): void;
  child(options: any): Logger;
}

/**
 * Minimal console-backed Logger. Object messages are JSON-stringified;
 * string messages pass through unchanged. `child` returns a logger whose
 * name gains a dotted suffix: "parent.component" (or "parent.child" when
 * no component is given).
 */
const createLogger = (name: string): Logger => {
  const emit = (
    sink: (...parts: any[]) => void,
    level: string,
    msg: string | object,
    args: any[]
  ): void => {
    const rendered = typeof msg === 'object' ? JSON.stringify(msg) : msg;
    sink(`[${name}] ${level}:`, rendered, ...args);
  };
  return {
    info: (msg, ...args) => emit(console.log, 'INFO', msg, args),
    error: (msg, ...args) => emit(console.error, 'ERROR', msg, args),
    warn: (msg, ...args) => emit(console.warn, 'WARN', msg, args),
    debug: (msg, ...args) => emit(console.debug, 'DEBUG', msg, args),
    child: (options: any) => createLogger(`${name}.${options.component || 'child'}`)
  };
};
import {
GatewayConfig,
DataSourceConfig,
ProcessingPipeline,
ClientSubscription,
SubscriptionRequest,
DataSourceMetrics,
GatewayMetrics,
MarketDataTick,
MarketDataCandle,
MarketDataTrade,
MarketDataOrder,
HealthStatus
} from '../types/MarketDataGateway';
import { DataSourceManager } from './DataSourceManager';
import { ProcessingEngine } from './ProcessingEngine';
import { SubscriptionManager } from './SubscriptionManager';
import { CacheManager } from './CacheManager';
import { MetricsCollector } from './MetricsCollector';
import { ServiceIntegrationManager } from './ServiceIntegrationManager';
/**
 * Market Data Gateway — orchestrates the inbound pipeline:
 * data sources → processing engine → cache + client subscriptions +
 * downstream service integrations, with metrics recorded on every hop.
 *
 * Fix: the running flag is now consistently the private `_isRunning`
 * field. The previous start()/stop()/isRunning() read `this.isRunning`,
 * which is the *method* (always truthy) — start() always warned
 * "already running" and isRunning() returned itself instead of a boolean.
 */
export class MarketDataGatewayService extends EventEmitter {
  private config: GatewayConfig;
  private logger: Logger;
  private dataSourceManager!: DataSourceManager;
  private processingEngine!: ProcessingEngine;
  private subscriptionManager!: SubscriptionManager;
  private cacheManager!: CacheManager;
  private metricsCollector!: MetricsCollector;
  private serviceIntegration!: ServiceIntegrationManager;
  private _isRunning = false;           // true between a successful start() and stop()
  private startTime: Date = new Date(); // reset on every start(); basis for uptime

  constructor(config: GatewayConfig, logger: Logger) {
    super();
    this.config = config;
    this.logger = logger;
    this.initializeComponents();
    this.setupEventHandlers();
  }

  /** Construct all pipeline components, each with a scoped child logger. */
  private initializeComponents() {
    this.logger.info('Initializing Market Data Gateway components');
    this.dataSourceManager = new DataSourceManager(
      this.config.dataSources,
      this.logger.child({ component: 'DataSourceManager' })
    );
    this.processingEngine = new ProcessingEngine(
      this.config.processing,
      this.logger.child({ component: 'ProcessingEngine' })
    );
    this.subscriptionManager = new SubscriptionManager(
      this.logger.child({ component: 'SubscriptionManager' })
    );
    this.cacheManager = new CacheManager(
      this.config.cache,
      this.logger.child({ component: 'CacheManager' })
    );
    this.metricsCollector = new MetricsCollector(
      this.logger.child({ component: 'MetricsCollector' })
    );
    this.serviceIntegration = new ServiceIntegrationManager(
      this.logger.child({ component: 'ServiceIntegration' })
    );
  }

  /** Wire component events into the gateway-level handlers below. */
  private setupEventHandlers() {
    // Data source events
    this.dataSourceManager.on('data', this.handleIncomingData.bind(this));
    this.dataSourceManager.on('error', this.handleDataSourceError.bind(this));
    this.dataSourceManager.on('connected', this.handleDataSourceConnected.bind(this));
    this.dataSourceManager.on('disconnected', this.handleDataSourceDisconnected.bind(this));
    // Processing engine events
    this.processingEngine.on('processed', this.handleProcessedData.bind(this));
    this.processingEngine.on('error', this.handleProcessingError.bind(this));
    // Subscription events
    this.subscriptionManager.on('subscribed', this.handleClientSubscribed.bind(this));
    this.subscriptionManager.on('unsubscribed', this.handleClientUnsubscribed.bind(this));
    this.subscriptionManager.on('error', this.handleSubscriptionError.bind(this));
    // Cache events
    this.cacheManager.on('cached', this.handleDataCached.bind(this));
    this.cacheManager.on('error', this.handleCacheError.bind(this));
    // Service integration events
    this.serviceIntegration.on('data-forwarded', this.handleDataForwarded.bind(this));
    this.serviceIntegration.on('integration-error', this.handleIntegrationError.bind(this));
  }

  /**
   * Start all components in dependency order (cache → metrics →
   * integrations → processing → subscriptions → sources). If any step
   * fails, components that did start are rolled back and the original
   * error is rethrown. Emits 'started' on success.
   */
  public async start(): Promise<void> {
    if (this._isRunning) {
      this.logger.warn('Gateway is already running');
      return;
    }
    try {
      this.logger.info('Starting Market Data Gateway');
      this.startTime = new Date();
      await this.cacheManager.start();
      await this.metricsCollector.start();
      await this.serviceIntegration.start();
      await this.processingEngine.start();
      await this.subscriptionManager.start();
      await this.dataSourceManager.start();
      this._isRunning = true;
      this.logger.info('Market Data Gateway started successfully');
      this.emit('started');
    } catch (error) {
      this.logger.error({ error }, 'Failed to start Market Data Gateway');
      // Roll back whatever did start; don't let teardown failures mask
      // the original startup error.
      try {
        await this.stopComponents();
      } catch (stopError) {
        this.logger.error({ error: stopError }, 'Error during startup rollback');
      }
      throw error;
    }
  }

  /** Stop components in reverse start order. Shared by stop() and start() rollback. */
  private async stopComponents(): Promise<void> {
    await this.dataSourceManager.stop();
    await this.subscriptionManager.stop();
    await this.processingEngine.stop();
    await this.serviceIntegration.stop();
    await this.metricsCollector.stop();
    await this.cacheManager.stop();
  }

  /** Stop the gateway; no-op when not running. Emits 'stopped'. */
  public async stop(): Promise<void> {
    if (!this._isRunning) {
      return;
    }
    try {
      this.logger.info('Stopping Market Data Gateway');
      await this.stopComponents();
      this._isRunning = false;
      this.logger.info('Market Data Gateway stopped');
      this.emit('stopped');
    } catch (error) {
      this.logger.error({ error }, 'Error stopping Market Data Gateway');
      throw error;
    }
  }

  // Data handling methods

  /**
   * Fan one raw message through the whole pipeline: process → cache →
   * broadcast to subscribers → forward to integrated services. Errors
   * are logged and counted per source, never rethrown.
   */
  private async handleIncomingData(sourceId: string, data: any): Promise<void> {
    try {
      this.metricsCollector.recordMessage(sourceId, 'received');
      const processedData = await this.processingEngine.process(data);
      await this.cacheManager.cache(processedData);
      await this.subscriptionManager.broadcast(processedData);
      await this.serviceIntegration.forwardData(processedData);
      this.emit('data-processed', { sourceId, data: processedData });
    } catch (error) {
      this.logger.error({ error, sourceId, data }, 'Error handling incoming data');
      this.metricsCollector.recordError(sourceId);
    }
  }

  private async handleProcessedData(data: any): Promise<void> {
    this.logger.debug({ data }, 'Data processed successfully');
    this.metricsCollector.recordMessage('processing', 'processed');
  }

  private handleDataSourceError(sourceId: string, error: Error): void {
    this.logger.error({ sourceId, error }, 'Data source error');
    this.metricsCollector.recordError(sourceId);
    this.emit('source-error', { sourceId, error });
  }

  private handleDataSourceConnected(sourceId: string): void {
    this.logger.info({ sourceId }, 'Data source connected');
    this.metricsCollector.recordConnection(sourceId, 'connected');
  }

  private handleDataSourceDisconnected(sourceId: string): void {
    this.logger.warn({ sourceId }, 'Data source disconnected');
    this.metricsCollector.recordConnection(sourceId, 'disconnected');
  }

  private handleProcessingError(error: Error, data: any): void {
    this.logger.error({ error, data }, 'Processing error');
    this.emit('processing-error', { error, data });
  }

  private handleClientSubscribed(subscription: ClientSubscription): void {
    this.logger.info({
      clientId: subscription.request.clientId,
      symbols: subscription.request.symbols
    }, 'Client subscribed');
  }

  private handleClientUnsubscribed(clientId: string): void {
    this.logger.info({ clientId }, 'Client unsubscribed');
  }

  private handleSubscriptionError(error: Error, clientId: string): void {
    this.logger.error({ error, clientId }, 'Subscription error');
  }

  private handleDataCached(key: string, data: any): void {
    this.logger.debug({ key }, 'Data cached');
  }

  private handleCacheError(error: Error, operation: string): void {
    this.logger.error({ error, operation }, 'Cache error');
  }

  private handleDataForwarded(service: string, data: any): void {
    this.logger.debug({ service }, 'Data forwarded to service');
  }

  private handleIntegrationError(service: string, error: Error): void {
    this.logger.error({ service, error }, 'Service integration error');
  }

  // Public API methods — thin delegations to the owning component.

  public async subscribe(request: SubscriptionRequest): Promise<string> {
    return this.subscriptionManager.subscribe(request);
  }

  public async unsubscribe(subscriptionId: string): Promise<void> {
    return this.subscriptionManager.unsubscribe(subscriptionId);
  }

  public async getSubscriptions(clientId?: string): Promise<ClientSubscription[]> {
    return this.subscriptionManager.getSubscriptions(clientId);
  }

  public async addDataSource(config: DataSourceConfig): Promise<void> {
    return this.dataSourceManager.addDataSource(config);
  }

  public async removeDataSource(sourceId: string): Promise<void> {
    return this.dataSourceManager.removeDataSource(sourceId);
  }

  public async updateDataSource(sourceId: string, config: Partial<DataSourceConfig>): Promise<void> {
    return this.dataSourceManager.updateDataSource(sourceId, config);
  }

  public async getDataSources(): Promise<DataSourceConfig[]> {
    return this.dataSourceManager.getDataSources();
  }

  public async addProcessingPipeline(pipeline: ProcessingPipeline): Promise<void> {
    return this.processingEngine.addPipeline(pipeline);
  }

  public async removeProcessingPipeline(pipelineId: string): Promise<void> {
    return this.processingEngine.removePipeline(pipelineId);
  }

  public async getProcessingPipelines(): Promise<ProcessingPipeline[]> {
    return this.processingEngine.getPipelines();
  }

  public async getMetrics(): Promise<GatewayMetrics> {
    return this.metricsCollector.getMetrics();
  }

  public async getDataSourceMetrics(sourceId?: string): Promise<DataSourceMetrics[]> {
    return this.metricsCollector.getDataSourceMetrics(sourceId);
  }

  /**
   * Aggregate health: 'degraded' when any dependency (cache, processing
   * engine, data sources) reports unhealthy, 'healthy' otherwise.
   */
  public async getHealthStatus(): Promise<HealthStatus> {
    const metrics = await this.getMetrics();
    const dataSources = await this.getDataSources();
    const dependencies = [
      {
        name: 'cache',
        status: ((await this.cacheManager.isHealthy()) ? 'healthy' : 'unhealthy') as 'healthy' | 'unhealthy'
      },
      {
        name: 'processing-engine',
        status: (this.processingEngine.isHealthy() ? 'healthy' : 'unhealthy') as 'healthy' | 'unhealthy'
      },
      {
        name: 'data-sources',
        status: (dataSources.every(ds => ds.enabled) ? 'healthy' : 'unhealthy') as 'healthy' | 'unhealthy'
      }
    ];
    const hasUnhealthyDependencies = dependencies.some(dep => dep.status === 'unhealthy');
    // Average latency across sources; guard the division when no data
    // sources are registered.
    const sourceCount = metrics.dataSources.length;
    const avgLatencyMs = sourceCount > 0
      ? metrics.dataSources.reduce((sum, ds) => sum + ds.latency.avgMs, 0) / sourceCount
      : 0;
    return {
      service: 'market-data-gateway',
      status: hasUnhealthyDependencies ? 'degraded' : 'healthy',
      timestamp: new Date(),
      uptime: Date.now() - this.startTime.getTime(),
      version: process.env.SERVICE_VERSION || '1.0.0',
      dependencies,
      metrics: {
        connectionsActive: metrics.subscriptions.active,
        messagesPerSecond: metrics.processing.messagesPerSecond,
        errorRate: metrics.processing.errorRate,
        avgLatencyMs
      }
    };
  }

  // Cache operations

  public async getCachedData(key: string): Promise<any> {
    return this.cacheManager.get(key);
  }

  public async setCachedData(key: string, data: any, ttl?: number): Promise<void> {
    return this.cacheManager.set(key, data, ttl);
  }

  // Configuration management

  /** Shallow copy so callers cannot mutate the live config object. */
  public getConfig(): GatewayConfig {
    return { ...this.config };
  }

  /**
   * Merge updates into the gateway config and propagate the relevant
   * sections to the owning components. Emits 'config-updated'.
   */
  public async updateConfig(updates: Partial<GatewayConfig>): Promise<void> {
    this.config = { ...this.config, ...updates };
    this.logger.info('Gateway configuration updated');
    if (updates.dataSources) {
      await this.dataSourceManager.updateConfig(updates.dataSources);
    }
    if (updates.processing) {
      await this.processingEngine.updateConfig(updates.processing);
    }
    this.emit('config-updated', this.config);
  }

  // Utility methods

  /** Whether the gateway is currently started. */
  public isRunning(): boolean {
    return this._isRunning;
  }

  /** Milliseconds since the last (re)start. */
  public getUptime(): number {
    return Date.now() - this.startTime.getTime();
  }
}

View file

@ -1,278 +0,0 @@
import type { MarketData, OHLCV, MarketDataEvent } from '@stock-bot/types';
import { dataProviderConfigs } from '@stock-bot/config';
import { EventPublisher } from './EventPublisher';
import { DataNormalizer } from './DataNormalizer';
export class MarketDataService {
private wsClients: Set<any> = new Set();
private subscriptions: Map<string, Set<any>> = new Map();
private dataUpdateInterval: Timer | null = null;
private readonly UPDATE_INTERVAL = 5000; // 5 seconds
constructor(
private eventPublisher: EventPublisher,
private dataNormalizer: DataNormalizer
) {}
/**
* Initialize the market data service
*/
async initialize(): Promise<void> {
console.log('🔄 Initializing Market Data Service...');
// Set up periodic data updates for demo purposes
this.startDataUpdates();
// Set service health
await this.eventPublisher.setServiceHealth('market-data-gateway', 'healthy');
console.log('✅ Market Data Service initialized');
}
/**
* Get latest market data for a symbol
*/
async getLatestData(symbol: string): Promise<MarketData> {
// First check cache
const cached = await this.eventPublisher.getCachedMarketData(symbol);
if (cached) {
return cached;
}
// Fetch fresh data (using demo data for now)
const marketData = this.generateDemoData(symbol);
// Cache the data
await this.eventPublisher.cacheMarketData(symbol, marketData, 60);
// Publish market data event
const event: MarketDataEvent = {
type: 'MARKET_DATA',
data: marketData,
timestamp: new Date(),
};
await this.eventPublisher.publishMarketData(event);
return marketData;
}
/**
* Get OHLCV data for a symbol
*/
async getOHLCV(symbol: string, interval: string, limit: number): Promise<OHLCV[]> {
// Generate demo OHLCV data
const ohlcvData = this.generateDemoOHLCV(symbol, limit);
// Cache the data
await this.eventPublisher.cacheMarketData(`ohlcv:${symbol}:${interval}`, ohlcvData, 300);
return ohlcvData;
}
/**
* Add WebSocket client for real-time updates
*/
addWebSocketClient(ws: any): void {
this.wsClients.add(ws);
}
/**
* Remove WebSocket client
*/
removeWebSocketClient(ws: any): void {
this.wsClients.delete(ws);
// Remove from all subscriptions
for (const [symbol, clients] of this.subscriptions) {
clients.delete(ws);
if (clients.size === 0) {
this.subscriptions.delete(symbol);
}
}
}
/**
* Handle WebSocket messages
*/
handleWebSocketMessage(ws: any, data: any): void {
try {
const message = typeof data === 'string' ? JSON.parse(data) : data;
switch (message.type) {
case 'subscribe':
this.subscribeToSymbol(ws, message.symbol);
break;
case 'unsubscribe':
this.unsubscribeFromSymbol(ws, message.symbol);
break;
default:
console.log('Unknown WebSocket message type:', message.type);
}
} catch (error) {
console.error('Error handling WebSocket message:', error);
}
}
/**
* Subscribe WebSocket client to symbol updates
*/
private subscribeToSymbol(ws: any, symbol: string): void {
if (!this.subscriptions.has(symbol)) {
this.subscriptions.set(symbol, new Set());
}
this.subscriptions.get(symbol)!.add(ws);
ws.send(JSON.stringify({
type: 'subscribed',
symbol,
timestamp: new Date().toISOString(),
}));
}
/**
* Unsubscribe WebSocket client from symbol updates
*/
private unsubscribeFromSymbol(ws: any, symbol: string): void {
const clients = this.subscriptions.get(symbol);
if (clients) {
clients.delete(ws);
if (clients.size === 0) {
this.subscriptions.delete(symbol);
}
}
ws.send(JSON.stringify({
type: 'unsubscribed',
symbol,
timestamp: new Date().toISOString(),
}));
}
/**
* Start periodic data updates for demo
*/
private startDataUpdates(): void {
this.dataUpdateInterval = setInterval(async () => {
const symbols = ['AAPL', 'GOOGL', 'MSFT', 'TSLA', 'AMZN'];
for (const symbol of symbols) {
if (this.subscriptions.has(symbol)) {
const marketData = this.generateDemoData(symbol);
// Send to subscribed WebSocket clients
const clients = this.subscriptions.get(symbol)!;
const message = JSON.stringify({
type: 'market_data',
data: marketData,
timestamp: new Date().toISOString(),
});
for (const client of clients) {
try {
client.send(message);
} catch (error) {
console.error('Error sending WebSocket message:', error);
clients.delete(client);
}
}
// Publish event
const event: MarketDataEvent = {
type: 'MARKET_DATA',
data: marketData,
timestamp: new Date(),
};
await this.eventPublisher.publishMarketData(event);
}
}
}, this.UPDATE_INTERVAL);
}
/**
* Generate demo market data
*/
private generateDemoData(symbol: string): MarketData {
const basePrice = this.getBasePrice(symbol);
const variation = (Math.random() - 0.5) * 0.02; // ±1% variation
const price = basePrice * (1 + variation);
return {
symbol,
price: Math.round(price * 100) / 100,
bid: Math.round((price - 0.01) * 100) / 100,
ask: Math.round((price + 0.01) * 100) / 100,
volume: Math.floor(Math.random() * 1000000) + 100000,
timestamp: new Date(),
};
}
/**
 * Build `limit` synthetic one-minute candles for `symbol`, oldest first.
 * Each candle opens at the previous candle's close and moves by up to
 * ±2.5%, with wicks stretched a little beyond the body.
 */
private generateDemoOHLCV(symbol: string, limit: number): OHLCV[] {
  const toCents = (value: number): number => Math.round(value * 100) / 100;
  const candles: OHLCV[] = [];
  let lastClose = this.getBasePrice(symbol);
  for (let minutesAgo = limit - 1; minutesAgo >= 0; minutesAgo--) {
    const open = lastClose;
    const close = open * (1 + (Math.random() - 0.5) * 0.05); // ±2.5% move
    // High/low extend beyond the open/close body by up to 2%.
    const high = Math.max(open, close) * (1 + Math.random() * 0.02);
    const low = Math.min(open, close) * (1 - Math.random() * 0.02);
    candles.push({
      symbol,
      timestamp: new Date(Date.now() - minutesAgo * 60000), // 1 minute intervals
      open: toCents(open),
      high: toCents(high),
      low: toCents(low),
      close: toCents(close),
      volume: Math.floor(Math.random() * 50000) + 10000,
    });
    lastClose = close;
  }
  return candles;
}
/**
 * Reference price used to seed demo data for a handful of well-known
 * tickers; any unrecognised symbol falls back to 100.00.
 */
private getBasePrice(symbol: string): number {
  switch (symbol) {
    case 'AAPL': return 175.50;
    case 'GOOGL': return 142.30;
    case 'MSFT': return 378.85;
    case 'TSLA': return 208.75;
    case 'AMZN': return 151.20;
    case 'NVDA': return 465.80;
    case 'META': return 298.45;
    case 'NFLX': return 425.60;
    default: return 100.00;
  }
}
/**
 * Gracefully shut down the service: stop the demo-data timer, close every
 * client WebSocket, mark the gateway unhealthy, then disconnect the
 * event publisher.
 */
async shutdown(): Promise<void> {
  console.log('🔄 Shutting down Market Data Service...');
  // Stop the periodic demo feed first so no further ticks are emitted.
  if (this.dataUpdateInterval) {
    clearInterval(this.dataUpdateInterval);
  }
  // Close all WebSocket connections
  for (const client of this.wsClients) {
    client.close();
  }
  // Publish the unhealthy status before tearing down the publisher connection.
  await this.eventPublisher.setServiceHealth('market-data-gateway', 'unhealthy');
  await this.eventPublisher.disconnect();
  console.log('✅ Market Data Service shutdown complete');
}
}

View file

@ -1,511 +0,0 @@
import { EventEmitter } from 'events';
import {
GatewayMetrics,
Logger,
HealthStatus,
ProcessingMetrics,
DataSourceMetrics,
SubscriptionMetrics
} from '../types/MarketDataGateway';
/** A single sampled value in a metric time series. */
interface MetricPoint {
  value: number;
  timestamp: number; // epoch ms at which the value was recorded
  labels?: Record<string, string>; // optional dimension labels (e.g. source)
}
/** Bounded rolling buffer of points for one named metric. */
interface TimeSeriesMetric {
  name: string;
  points: MetricPoint[];
  maxPoints: number; // cap on retained points; older ones are trimmed
}
/** Declarative threshold rule evaluated against a collected metric. */
interface AlertRule {
  id: string;
  metric: string; // metric name, or dotted path into the aggregated snapshot
  condition: 'gt' | 'lt' | 'eq' | 'gte' | 'lte';
  threshold: number;
  duration: number; // ms
  enabled: boolean;
  lastTriggered?: number; // epoch ms of the most recent trigger, if any
}
/** A fired instance of an AlertRule. */
interface Alert {
  id: string; // same as the rule id that produced it
  rule: AlertRule;
  value: number; // metric value that tripped the rule
  timestamp: number; // epoch ms when the alert fired
  message: string;
  severity: 'info' | 'warning' | 'error' | 'critical';
}
/**
 * In-memory metrics collector for the market-data gateway.
 *
 * Records counters, gauges and histograms as bounded time series, rolls
 * them up once per second into a GatewayMetrics snapshot, evaluates
 * configurable threshold alert rules against that snapshot, and can export
 * everything as JSON or Prometheus text format.
 *
 * Events emitted: 'metrics-updated', 'alert-triggered', 'alert-cleared'.
 */
export class MetricsCollector extends EventEmitter {
  private logger: Logger;
  // Raw time series, keyed by metric name.
  private metrics: Map<string, TimeSeriesMetric>;
  // One-second rollup snapshot served by getAggregatedMetrics().
  private aggregatedMetrics: GatewayMetrics;
  // Currently-firing alerts, keyed by rule id.
  private alerts: Map<string, Alert>;
  private alertRules: Map<string, AlertRule>;
  private collectInterval: NodeJS.Timeout | null = null;
  private isRunning: boolean = false;

  constructor(logger: Logger) {
    super();
    this.logger = logger;
    this.metrics = new Map();
    this.alerts = new Map();
    this.alertRules = new Map();
    // Start from an all-zero snapshot; collectMetrics() refreshes it each second.
    this.aggregatedMetrics = {
      totalMessages: 0,
      messagesPerSecond: 0,
      averageLatency: 0,
      errorRate: 0,
      activeConnections: 0,
      activeSubscriptions: 0,
      cacheHitRate: 0,
      uptime: 0,
      timestamp: new Date().toISOString(),
      dataSources: new Map(),
      processing: {
        totalProcessed: 0,
        processedPerSecond: 0,
        processingLatency: 0,
        errorCount: 0,
        queueDepth: 0,
        processorMetrics: new Map(),
      },
      subscriptions: {
        totalSubscriptions: 0,
        activeClients: 0,
        messagesSent: 0,
        sendRate: 0,
        subscriptionsBySymbol: new Map(),
        clientMetrics: new Map(),
      },
    };
    this.setupDefaultAlertRules();
    // Collection begins immediately on construction.
    this.startCollection();
  }

  /** Install the built-in error-rate / latency / cache / queue alert rules. */
  private setupDefaultAlertRules(): void {
    const defaultRules: AlertRule[] = [
      {
        id: 'high-error-rate',
        metric: 'errorRate',
        condition: 'gt',
        threshold: 0.05, // 5%
        duration: 60000, // 1 minute
        enabled: true,
      },
      {
        id: 'high-latency',
        metric: 'averageLatency',
        condition: 'gt',
        threshold: 1000, // 1 second
        duration: 30000, // 30 seconds
        enabled: true,
      },
      {
        id: 'low-cache-hit-rate',
        metric: 'cacheHitRate',
        condition: 'lt',
        threshold: 0.8, // 80%
        duration: 300000, // 5 minutes
        enabled: true,
      },
      {
        id: 'high-queue-depth',
        metric: 'processing.queueDepth',
        condition: 'gt',
        threshold: 1000,
        duration: 60000, // 1 minute
        enabled: true,
      },
    ];
    defaultRules.forEach(rule => {
      this.alertRules.set(rule.id, rule);
    });
  }

  /** Start the 1-second collect / check-alerts / cleanup loop (no-op if running). */
  startCollection(): void {
    if (this.isRunning) return;
    this.isRunning = true;
    this.collectInterval = setInterval(() => {
      this.collectMetrics();
      this.checkAlerts();
      this.cleanupOldMetrics();
    }, 1000); // Collect every second
    this.logger.info('Metrics collection started');
  }

  /** Stop the collection loop (no-op if not running). */
  stopCollection(): void {
    if (!this.isRunning) return;
    this.isRunning = false;
    if (this.collectInterval) {
      clearInterval(this.collectInterval);
      this.collectInterval = null;
    }
    this.logger.info('Metrics collection stopped');
  }

  // Metric recording methods

  /** Record one inbound message, with optional latency sample and error flag. */
  recordMessage(source: string, latency?: number, error?: boolean): void {
    this.recordMetric('totalMessages', 1);
    this.recordMetric('messagesPerSecond', 1);
    if (latency !== undefined) {
      this.recordMetric('latency', latency, { source });
    }
    if (error) {
      this.recordMetric('errors', 1, { source });
    }
  }

  /** Record one processing batch: count, latency, and error count. */
  recordProcessing(processed: number, latency: number, errors: number): void {
    this.recordMetric('processing.totalProcessed', processed);
    this.recordMetric('processing.processedPerSecond', processed);
    this.recordMetric('processing.processingLatency', latency);
    this.recordMetric('processing.errorCount', errors);
  }

  /** Record a subscription change (+1 on subscribe, -1 on unsubscribe). */
  recordSubscription(action: 'subscribe' | 'unsubscribe', symbol: string, clientId: string): void {
    this.recordMetric('subscriptions.totalSubscriptions', action === 'subscribe' ? 1 : -1);
    this.recordMetric(`subscriptions.symbol.${symbol}`, action === 'subscribe' ? 1 : -1);
    this.recordMetric(`subscriptions.client.${clientId}`, action === 'subscribe' ? 1 : -1);
  }

  /** Record numeric fields of a data-source metrics snapshot (non-numbers skipped). */
  recordDataSource(sourceId: string, metrics: Partial<DataSourceMetrics>): void {
    Object.entries(metrics).forEach(([key, value]) => {
      if (typeof value === 'number') {
        this.recordMetric(`dataSource.${sourceId}.${key}`, value);
      }
    });
  }

  /** Record a cache snapshot: hit rate, entry count, memory usage. */
  recordCacheMetrics(hitRate: number, size: number, memoryUsage: number): void {
    this.recordMetric('cacheHitRate', hitRate);
    this.recordMetric('cacheSize', size);
    this.recordMetric('cacheMemoryUsage', memoryUsage);
  }

  /** Set a gauge metric — replaces the previous value rather than appending. */
  setGauge(metric: string, value: number, labels?: Record<string, string>): void {
    this.recordMetric(metric, value, labels, true);
  }

  /** Append a counter increment (default 1). */
  incrementCounter(metric: string, value: number = 1, labels?: Record<string, string>): void {
    this.recordMetric(metric, value, labels, false);
  }

  /** Record a histogram observation as paired `.value` and `.count` series. */
  recordHistogram(metric: string, value: number, labels?: Record<string, string>): void {
    this.recordMetric(`${metric}.value`, value, labels);
    this.recordMetric(`${metric}.count`, 1, labels);
  }

  /**
   * Core recording primitive. Gauges keep only the latest point; counters
   * and histogram observations append, trimmed to `maxPoints`.
   */
  private recordMetric(
    name: string,
    value: number,
    labels?: Record<string, string>,
    isGauge: boolean = false
  ): void {
    const point: MetricPoint = {
      value,
      timestamp: Date.now(),
      labels,
    };
    if (!this.metrics.has(name)) {
      this.metrics.set(name, {
        name,
        points: [],
        maxPoints: 3600, // Keep 1 hour of data at 1-second intervals
      });
    }
    const metric = this.metrics.get(name)!;
    if (isGauge) {
      // For gauges, replace the last value
      metric.points = [point];
    } else {
      // For counters and histograms, append
      metric.points.push(point);
    }
    // Trim old points
    if (metric.points.length > metric.maxPoints) {
      metric.points = metric.points.slice(-metric.maxPoints);
    }
  }

  // Metric retrieval methods

  /** Return the points for a metric, optionally limited to the last `duration` ms. */
  getMetric(name: string, duration?: number): MetricPoint[] {
    const metric = this.metrics.get(name);
    if (!metric) return [];
    if (!duration) return [...metric.points];
    const cutoff = Date.now() - duration;
    return metric.points.filter(point => point.timestamp >= cutoff);
  }

  /** Mean of a metric's point values over the optional window (0 if empty). */
  getAverageMetric(name: string, duration?: number): number {
    const points = this.getMetric(name, duration);
    if (points.length === 0) return 0;
    const sum = points.reduce((acc, point) => acc + point.value, 0);
    return sum / points.length;
  }

  /** Value of the most recent point, or null if the series is empty/unknown. */
  getLatestMetric(name: string): number | null {
    const metric = this.metrics.get(name);
    if (!metric || metric.points.length === 0) return null;
    return metric.points[metric.points.length - 1].value;
  }

  /**
   * Per-second rate over the window, from the value delta between the
   * oldest and newest points in range (0 with fewer than two points).
   */
  getRate(name: string, duration: number = 60000): number {
    const points = this.getMetric(name, duration);
    if (points.length < 2) return 0;
    const oldest = points[0];
    const newest = points[points.length - 1];
    const timeDiff = newest.timestamp - oldest.timestamp;
    const valueDiff = newest.value - oldest.value;
    return timeDiff > 0 ? (valueDiff / timeDiff) * 1000 : 0; // per second
  }

  /** Nearest-rank percentile of a metric's values over the optional window. */
  getPercentile(name: string, percentile: number, duration?: number): number {
    const points = this.getMetric(name, duration);
    if (points.length === 0) return 0;
    const values = points.map(p => p.value).sort((a, b) => a - b);
    const index = Math.ceil((percentile / 100) * values.length) - 1;
    return values[Math.max(0, index)];
  }

  // Aggregated metrics

  /** Shallow copy of the latest 1-second rollup snapshot. */
  getAggregatedMetrics(): GatewayMetrics {
    return { ...this.aggregatedMetrics };
  }

  /** Refresh the aggregated snapshot from the raw series; emits 'metrics-updated'. */
  private collectMetrics(): void {
    const now = new Date().toISOString();
    // Update basic metrics
    // NOTE(review): recordMessage() appends per-event increments, so
    // getLatestMetric('totalMessages') returns the value of the last
    // increment, not a running total — confirm intended semantics.
    this.aggregatedMetrics.totalMessages = this.getLatestMetric('totalMessages') || 0;
    this.aggregatedMetrics.messagesPerSecond = this.getRate('messagesPerSecond');
    this.aggregatedMetrics.averageLatency = this.getAverageMetric('latency', 60000);
    this.aggregatedMetrics.cacheHitRate = this.getLatestMetric('cacheHitRate') || 0;
    this.aggregatedMetrics.timestamp = now;
    // Calculate error rate
    const totalMessages = this.aggregatedMetrics.totalMessages;
    const totalErrors = this.getLatestMetric('errors') || 0;
    this.aggregatedMetrics.errorRate = totalMessages > 0 ? totalErrors / totalMessages : 0;
    // Update processing metrics
    this.aggregatedMetrics.processing.totalProcessed = this.getLatestMetric('processing.totalProcessed') || 0;
    this.aggregatedMetrics.processing.processedPerSecond = this.getRate('processing.processedPerSecond');
    this.aggregatedMetrics.processing.processingLatency = this.getAverageMetric('processing.processingLatency', 60000);
    this.aggregatedMetrics.processing.errorCount = this.getLatestMetric('processing.errorCount') || 0;
    this.aggregatedMetrics.processing.queueDepth = this.getLatestMetric('processing.queueDepth') || 0;
    // Update subscription metrics
    this.aggregatedMetrics.subscriptions.totalSubscriptions = this.getLatestMetric('subscriptions.totalSubscriptions') || 0;
    this.aggregatedMetrics.subscriptions.messagesSent = this.getLatestMetric('subscriptions.messagesSent') || 0;
    this.aggregatedMetrics.subscriptions.sendRate = this.getRate('subscriptions.messagesSent');
    this.emit('metrics-updated', this.aggregatedMetrics);
  }

  // Alert management

  /** Register (or replace) an alert rule by id. */
  addAlertRule(rule: AlertRule): void {
    this.alertRules.set(rule.id, rule);
    this.logger.info(`Alert rule added: ${rule.id}`);
  }

  /** Remove a rule and any active alert it raised. */
  removeAlertRule(ruleId: string): void {
    this.alertRules.delete(ruleId);
    this.alerts.delete(ruleId);
    this.logger.info(`Alert rule removed: ${ruleId}`);
  }

  getAlertRules(): AlertRule[] {
    return Array.from(this.alertRules.values());
  }

  getActiveAlerts(): Alert[] {
    return Array.from(this.alerts.values());
  }

  /**
   * Evaluate every enabled rule against current values; raises (or re-raises
   * after `duration` ms) alerts, and clears alerts whose condition no longer
   * holds. Emits 'alert-triggered' / 'alert-cleared'.
   */
  private checkAlerts(): void {
    for (const rule of this.alertRules.values()) {
      if (!rule.enabled) continue;
      const value = this.getMetricValue(rule.metric);
      if (value === null) continue;
      const isTriggered = this.evaluateCondition(value, rule.condition, rule.threshold);
      if (isTriggered) {
        const now = Date.now();
        const existingAlert = this.alerts.get(rule.id);
        // Check if alert should be triggered based on duration
        if (!existingAlert || (now - existingAlert.timestamp) >= rule.duration) {
          const alert: Alert = {
            id: rule.id,
            rule,
            value,
            timestamp: now,
            message: `Alert: ${rule.metric} ${rule.condition} ${rule.threshold} (current: ${value})`,
            severity: this.getSeverity(rule.metric, value),
          };
          this.alerts.set(rule.id, alert);
          this.emit('alert-triggered', alert);
          this.logger.warn(`Alert triggered: ${alert.message}`);
        }
      } else {
        // Clear alert if condition is no longer met
        if (this.alerts.has(rule.id)) {
          this.alerts.delete(rule.id);
          this.emit('alert-cleared', rule.id);
          this.logger.info(`Alert cleared: ${rule.id}`);
        }
      }
    }
  }

  /**
   * Resolve a rule's metric reference: dotted paths are looked up inside the
   * aggregated snapshot, plain names in the raw time series.
   */
  private getMetricValue(metricPath: string): number | null {
    if (metricPath.includes('.')) {
      // Handle nested metric paths
      const parts = metricPath.split('.');
      let value: any = this.aggregatedMetrics;
      for (const part of parts) {
        if (value && typeof value === 'object' && part in value) {
          value = value[part];
        } else {
          return null;
        }
      }
      return typeof value === 'number' ? value : null;
    }
    return this.getLatestMetric(metricPath);
  }

  /** Apply one comparison operator; unknown operators evaluate to false. */
  private evaluateCondition(value: number, condition: string, threshold: number): boolean {
    switch (condition) {
      case 'gt': return value > threshold;
      case 'lt': return value < threshold;
      case 'eq': return value === threshold;
      case 'gte': return value >= threshold;
      case 'lte': return value <= threshold;
      default: return false;
    }
  }

  /** Map a metric name and value to an alert severity via name heuristics. */
  private getSeverity(metric: string, value: number): Alert['severity'] {
    // Define severity based on metric type and value
    if (metric.includes('error') || metric.includes('Error')) {
      if (value > 0.1) return 'critical'; // > 10% error rate
      if (value > 0.05) return 'error'; // > 5% error rate
      if (value > 0.01) return 'warning'; // > 1% error rate
      return 'info';
    }
    if (metric.includes('latency') || metric.includes('Latency')) {
      if (value > 5000) return 'critical'; // > 5 seconds
      if (value > 2000) return 'error'; // > 2 seconds
      if (value > 1000) return 'warning'; // > 1 second
      return 'info';
    }
    return 'warning'; // Default severity
  }

  /** Drop points older than 24 hours from every series. */
  private cleanupOldMetrics(): void {
    const cutoff = Date.now() - (24 * 60 * 60 * 1000); // 24 hours
    for (const metric of this.metrics.values()) {
      metric.points = metric.points.filter(point => point.timestamp > cutoff);
    }
  }

  // Health and status

  /** Health derived from active alerts: critical → unhealthy, error → degraded. */
  getHealth(): HealthStatus {
    const activeAlerts = this.getActiveAlerts();
    const criticalAlerts = activeAlerts.filter(a => a.severity === 'critical');
    const errorAlerts = activeAlerts.filter(a => a.severity === 'error');
    let status: 'healthy' | 'degraded' | 'unhealthy' = 'healthy';
    let message = 'Metrics collector is operational';
    if (criticalAlerts.length > 0) {
      status = 'unhealthy';
      message = `${criticalAlerts.length} critical alerts active`;
    } else if (errorAlerts.length > 0) {
      status = 'degraded';
      message = `${errorAlerts.length} error alerts active`;
    }
    return {
      status,
      message,
      timestamp: new Date().toISOString(),
      details: {
        isRunning: this.isRunning,
        totalMetrics: this.metrics.size,
        activeAlerts: activeAlerts.length,
        alertRules: this.alertRules.size,
      },
    };
  }

  // Export methods

  /** Serialize all metrics/alerts as JSON (default) or Prometheus text format. */
  exportMetrics(format: 'json' | 'prometheus' = 'json'): string {
    if (format === 'prometheus') {
      return this.exportPrometheusFormat();
    }
    return JSON.stringify({
      aggregated: this.aggregatedMetrics,
      timeSeries: Object.fromEntries(this.metrics),
      alerts: Object.fromEntries(this.alerts),
    }, null, 2);
  }

  /** Render a subset of the aggregated metrics in Prometheus exposition format. */
  private exportPrometheusFormat(): string {
    const lines: string[] = [];
    // Export aggregated metrics
    lines.push(`# HELP gateway_total_messages Total messages processed`);
    lines.push(`# TYPE gateway_total_messages counter`);
    lines.push(`gateway_total_messages ${this.aggregatedMetrics.totalMessages}`);
    lines.push(`# HELP gateway_messages_per_second Messages processed per second`);
    lines.push(`# TYPE gateway_messages_per_second gauge`);
    lines.push(`gateway_messages_per_second ${this.aggregatedMetrics.messagesPerSecond}`);
    lines.push(`# HELP gateway_average_latency Average processing latency in milliseconds`);
    lines.push(`# TYPE gateway_average_latency gauge`);
    lines.push(`gateway_average_latency ${this.aggregatedMetrics.averageLatency}`);
    lines.push(`# HELP gateway_error_rate Error rate as percentage`);
    lines.push(`# TYPE gateway_error_rate gauge`);
    lines.push(`gateway_error_rate ${this.aggregatedMetrics.errorRate}`);
    return lines.join('\n');
  }
}

View file

@ -1,539 +0,0 @@
import { EventEmitter } from 'eventemitter3';
import { Logger } from 'pino';
import {
ProcessingPipeline,
DataProcessor,
MarketDataTick,
MarketDataCandle,
MarketDataTrade,
ProcessingError
} from '../types/MarketDataGateway';
/** Unit of work queued for background pipeline processing. */
interface ProcessingJob {
  id: string;
  data: any; // raw market-data payload to run through the pipeline
  pipeline: ProcessingPipeline; // pipeline this job should be processed by
  timestamp: Date; // when the job was created
  attempts: number; // processing attempts made so far
}
/**
 * Pipeline-based processor for raw market data.
 *
 * Hosts a registry of DataProcessors (validation, enrichment,
 * normalization, outlier detection, market-hours filtering, OHLC
 * aggregation) and runs each incoming record through every configured
 * pipeline whose input filter matches it, in processor-priority order.
 *
 * Events emitted: 'processed', 'error', 'candle-generated'.
 */
export class ProcessingEngine extends EventEmitter {
  private config: any;
  private logger: Logger;
  private pipelines: Map<string, ProcessingPipeline> = new Map();
  private processors: Map<string, DataProcessor> = new Map();
  // NOTE(review): nothing in this class pushes onto this queue; confirm an
  // external producer exists, otherwise the background loop is idle.
  private processingQueue: ProcessingJob[] = [];
  private isProcessing = false;
  // Rolling stats over the most recent 1000 processed records.
  private processingStats = {
    totalProcessed: 0,
    totalErrors: 0,
    avgProcessingTimeMs: 0,
    processingTimes: [] as number[]
  };

  constructor(config: any, logger: Logger) {
    super();
    this.config = config;
    this.logger = logger;
    this.initializeBuiltInProcessors();
  }

  /** Register the six built-in processors with their default configs. */
  private initializeBuiltInProcessors() {
    // Data validation processor
    this.processors.set('data-validator', {
      id: 'data-validator',
      name: 'Data Validator',
      type: 'validation',
      enabled: true,
      priority: 1,
      config: {},
      process: this.validateData.bind(this)
    });
    // Data enrichment processor
    this.processors.set('data-enricher', {
      id: 'data-enricher',
      name: 'Data Enricher',
      type: 'enrichment',
      enabled: true,
      priority: 2,
      config: {},
      process: this.enrichData.bind(this)
    });
    // Data normalization processor
    this.processors.set('data-normalizer', {
      id: 'data-normalizer',
      name: 'Data Normalizer',
      type: 'normalization',
      enabled: true,
      priority: 3,
      config: {},
      process: this.normalizeData.bind(this)
    });
    // Outlier detection processor
    this.processors.set('outlier-detector', {
      id: 'outlier-detector',
      name: 'Outlier Detector',
      type: 'filter',
      enabled: true,
      priority: 4,
      config: {
        priceDeviationThreshold: 0.1, // 10% price deviation
        volumeThreshold: 1000000 // Minimum volume threshold
      },
      process: this.detectOutliers.bind(this)
    });
    // Market hours filter
    this.processors.set('market-hours-filter', {
      id: 'market-hours-filter',
      name: 'Market Hours Filter',
      type: 'filter',
      enabled: true,
      priority: 5,
      config: {
        marketOpen: '09:30',
        marketClose: '16:00',
        timezone: 'America/New_York'
      },
      process: this.filterMarketHours.bind(this)
    });
    // OHLC aggregator
    this.processors.set('ohlc-aggregator', {
      id: 'ohlc-aggregator',
      name: 'OHLC Aggregator',
      type: 'aggregation',
      enabled: true,
      priority: 6,
      config: {
        timeframes: ['1m', '5m', '15m', '1h', '1d']
      },
      process: this.aggregateOHLC.bind(this)
    });
  }

  /** Load pipelines from config and start the background processing loop. */
  public async start(): Promise<void> {
    this.logger.info('Starting Processing Engine');
    // Load configured pipelines
    if (this.config.pipelines) {
      for (const pipeline of this.config.pipelines) {
        this.addPipeline(pipeline);
      }
    }
    // Start processing loop
    this.startProcessing();
    this.logger.info('Processing Engine started');
  }

  /** Stop the loop and wait (polling) until the queue has drained. */
  public async stop(): Promise<void> {
    this.logger.info('Stopping Processing Engine');
    this.isProcessing = false;
    // Wait for current processing to complete
    while (this.processingQueue.length > 0) {
      await new Promise(resolve => setTimeout(resolve, 100));
    }
    this.logger.info('Processing Engine stopped');
  }

  /**
   * Synchronous entry point: run `data` through every applicable pipeline
   * in turn, update stats, emit 'processed' (or 'error' and rethrow).
   * Returns the data unchanged if no pipeline matches.
   */
  public async process(data: any): Promise<any> {
    const startTime = Date.now();
    try {
      // Find applicable pipelines for this data
      const applicablePipelines = this.findApplicablePipelines(data);
      if (applicablePipelines.length === 0) {
        // No processing needed, return original data
        return data;
      }
      let processedData = data;
      // Process through each applicable pipeline
      for (const pipeline of applicablePipelines) {
        processedData = await this.processThroughPipeline(processedData, pipeline);
      }
      // Update processing stats
      const processingTime = Date.now() - startTime;
      this.updateProcessingStats(processingTime, false);
      this.emit('processed', processedData);
      return processedData;
    } catch (error) {
      this.logger.error({ error, data }, 'Processing error');
      this.updateProcessingStats(Date.now() - startTime, true);
      this.emit('error', error, data);
      throw error;
    }
  }

  /** Register (or replace) a pipeline by id. */
  public addPipeline(pipeline: ProcessingPipeline): void {
    this.logger.info({ pipelineId: pipeline.id }, 'Adding processing pipeline');
    this.pipelines.set(pipeline.id, pipeline);
  }

  public removePipeline(pipelineId: string): void {
    this.logger.info({ pipelineId }, 'Removing processing pipeline');
    this.pipelines.delete(pipelineId);
  }

  public getPipelines(): ProcessingPipeline[] {
    return Array.from(this.pipelines.values());
  }

  /** Register (or replace) a custom processor by id. */
  public addProcessor(processor: DataProcessor): void {
    this.logger.info({ processorId: processor.id }, 'Adding data processor');
    this.processors.set(processor.id, processor);
  }

  public removeProcessor(processorId: string): void {
    this.logger.info({ processorId }, 'Removing data processor');
    this.processors.delete(processorId);
  }

  public getProcessors(): DataProcessor[] {
    return Array.from(this.processors.values());
  }

  /** Current stats plus the live queue depth. */
  public getProcessingStats() {
    return {
      ...this.processingStats,
      queueDepth: this.processingQueue.length
    };
  }

  /** Healthy = running and error ratio below 10% of processed records. */
  public isHealthy(): boolean {
    return this.isProcessing && this.processingStats.totalErrors / Math.max(this.processingStats.totalProcessed, 1) < 0.1;
  }

  /** All pipelines whose input filter matches the given record. */
  private findApplicablePipelines(data: any): ProcessingPipeline[] {
    const applicable: ProcessingPipeline[] = [];
    for (const pipeline of this.pipelines.values()) {
      if (this.isPipelineApplicable(data, pipeline)) {
        applicable.push(pipeline);
      }
    }
    return applicable;
  }

  /** Apply the pipeline's symbol / source / data-type input filters. */
  private isPipelineApplicable(data: any, pipeline: ProcessingPipeline): boolean {
    const { inputFilter } = pipeline;
    // Check symbol filter
    if (inputFilter.symbols && inputFilter.symbols.length > 0) {
      if (!data.symbol || !inputFilter.symbols.includes(data.symbol)) {
        return false;
      }
    }
    // Check source filter
    if (inputFilter.sources && inputFilter.sources.length > 0) {
      if (!data.source || !inputFilter.sources.includes(data.source)) {
        return false;
      }
    }
    // Check data type filter
    if (inputFilter.dataTypes && inputFilter.dataTypes.length > 0) {
      const dataType = this.getDataType(data);
      if (!inputFilter.dataTypes.includes(dataType)) {
        return false;
      }
    }
    return true;
  }

  /** Heuristic classification by field shape; order of checks matters. */
  private getDataType(data: any): string {
    if (data.id && data.side) return 'trade';
    if (data.open !== undefined && data.high !== undefined) return 'candle';
    if (data.price !== undefined) return 'quote';
    if (data.bids || data.asks) return 'orderbook';
    return 'unknown';
  }

  /**
   * Run data through a pipeline's enabled processors in priority order.
   * A processor returning null/undefined filters the record out; a
   * processor throwing is logged and skipped (data passes on unchanged).
   */
  private async processThroughPipeline(data: any, pipeline: ProcessingPipeline): Promise<any> {
    let processedData = data;
    // Sort processors by priority
    const sortedProcessors = pipeline.processors
      .filter(p => p.enabled)
      .sort((a, b) => a.priority - b.priority);
    for (const processorConfig of sortedProcessors) {
      const processor = this.processors.get(processorConfig.id);
      if (!processor) {
        this.logger.warn({
          processorId: processorConfig.id,
          pipelineId: pipeline.id
        }, 'Processor not found');
        continue;
      }
      try {
        processedData = await processor.process(processedData);
        // If processor returns null/undefined, filter out the data
        if (processedData === null || processedData === undefined) {
          this.logger.debug({
            processorId: processor.id,
            pipelineId: pipeline.id
          }, 'Data filtered out by processor');
          return null;
        }
      } catch (error) {
        this.logger.error({
          error,
          processorId: processor.id,
          pipelineId: pipeline.id,
          data: processedData
        }, 'Processor error');
        // Continue processing with original data on error
        // You might want to implement different error handling strategies
      }
    }
    return processedData;
  }

  /** Start the fire-and-forget background loop draining processingQueue. */
  private startProcessing(): void {
    this.isProcessing = true;
    const processLoop = async () => {
      while (this.isProcessing) {
        if (this.processingQueue.length > 0) {
          const job = this.processingQueue.shift()!;
          try {
            await this.processThroughPipeline(job.data, job.pipeline);
          } catch (error) {
            this.logger.error({
              jobId: job.id,
              error
            }, 'Job processing error');
          }
        } else {
          // Wait for new jobs
          await new Promise(resolve => setTimeout(resolve, 10));
        }
      }
    };
    processLoop();
  }

  /** Update counters and the rolling (last-1000) average processing time. */
  private updateProcessingStats(processingTime: number, isError: boolean): void {
    this.processingStats.totalProcessed++;
    if (isError) {
      this.processingStats.totalErrors++;
    }
    this.processingStats.processingTimes.push(processingTime);
    // Keep only last 1000 processing times
    if (this.processingStats.processingTimes.length > 1000) {
      this.processingStats.processingTimes = this.processingStats.processingTimes.slice(-1000);
    }
    // Update average processing time
    this.processingStats.avgProcessingTimeMs =
      this.processingStats.processingTimes.reduce((sum, time) => sum + time, 0) /
      this.processingStats.processingTimes.length;
  }

  // Built-in processor implementations

  /**
   * Reject records without a symbol, with non-positive price, or negative
   * volume; backfills a missing timestamp with the current time.
   */
  private async validateData(data: any): Promise<any> {
    // Basic data validation
    if (!data) {
      throw new Error('Data is null or undefined');
    }
    if (!data.symbol) {
      throw new Error('Missing symbol');
    }
    if (!data.timestamp) {
      data.timestamp = Date.now();
    }
    // Validate price data
    if (data.price !== undefined) {
      if (typeof data.price !== 'number' || data.price <= 0) {
        throw new Error('Invalid price');
      }
    }
    // Validate volume data
    if (data.volume !== undefined) {
      if (typeof data.volume !== 'number' || data.volume < 0) {
        throw new Error('Invalid volume');
      }
    }
    return data;
  }

  /** Add processedAt, dataType, change/changePercent and market-session fields. */
  private async enrichData(data: any): Promise<any> {
    // Add computed fields
    const enriched = { ...data };
    // Add processing timestamp
    enriched.processedAt = Date.now();
    // Add data type
    enriched.dataType = this.getDataType(data);
    // Calculate derived metrics for quotes
    if (data.price && data.prevClose) {
      enriched.change = data.price - data.prevClose;
      enriched.changePercent = (enriched.change / data.prevClose) * 100;
    }
    // Add market session info
    const marketSession = this.getMarketSession(data.timestamp);
    enriched.marketSession = marketSession;
    return enriched;
  }

  /** Uppercase the symbol, coerce timestamps to ms, round price to 4 dp. */
  private async normalizeData(data: any): Promise<any> {
    const normalized = { ...data };
    // Normalize symbol format
    if (normalized.symbol) {
      normalized.symbol = normalized.symbol.toUpperCase().trim();
    }
    // Normalize timestamp to milliseconds
    if (normalized.timestamp) {
      if (typeof normalized.timestamp === 'string') {
        normalized.timestamp = new Date(normalized.timestamp).getTime();
      } else if (normalized.timestamp.toString().length === 10) {
        // Convert seconds to milliseconds
        normalized.timestamp *= 1000;
      }
    }
    // Round price values to appropriate precision
    if (normalized.price) {
      normalized.price = Math.round(normalized.price * 10000) / 10000;
    }
    return normalized;
  }

  /** Flag (not drop) price moves beyond the configured deviation, and high volume. */
  private async detectOutliers(data: any): Promise<any> {
    // Simple outlier detection - in practice, you'd use historical data
    const config = this.processors.get('outlier-detector')?.config;
    if (data.price && data.prevClose) {
      const priceDeviation = Math.abs(data.price - data.prevClose) / data.prevClose;
      if (priceDeviation > (config?.priceDeviationThreshold || 0.1)) {
        this.logger.warn({
          symbol: data.symbol,
          price: data.price,
          prevClose: data.prevClose,
          deviation: priceDeviation
        }, 'Price outlier detected');
        // You could either filter out or flag the data
        data.outlier = true;
        data.outlierReason = 'price_deviation';
      }
    }
    if (data.volume) {
      const volumeThreshold = config?.volumeThreshold || 1000000;
      if (data.volume > volumeThreshold) {
        data.highVolume = true;
      }
    }
    return data;
  }

  /** Mark records outside the configured open/close window as after-hours. */
  private async filterMarketHours(data: any): Promise<any> {
    const config = this.processors.get('market-hours-filter')?.config;
    if (!config?.marketOpen || !config?.marketClose) {
      return data;
    }
    // Simple market hours check - in practice, you'd use proper timezone handling
    const timestamp = new Date(data.timestamp);
    const timeString = timestamp.toTimeString().substring(0, 5);
    if (timeString < config.marketOpen || timeString > config.marketClose) {
      // Mark as after hours
      data.afterHours = true;
    }
    return data;
  }

  /** Emit a degenerate 1m candle for each quote via 'candle-generated'. */
  private async aggregateOHLC(data: any): Promise<any> {
    // This is a simplified version - in practice, you'd maintain
    // aggregation windows and emit candles when complete
    if (data.dataType === 'quote' && data.price) {
      const candle = {
        symbol: data.symbol,
        timestamp: data.timestamp,
        open: data.price,
        high: data.price,
        low: data.price,
        close: data.price,
        volume: data.volume || 0,
        timeframe: '1m',
        source: data.source,
        dataType: 'candle'
      };
      // In practice, you'd emit this separately or include it in results
      this.emit('candle-generated', candle);
    }
    return data;
  }

  /** Classify a timestamp into pre-market / regular / after-hours / closed
   *  using the host's local clock (no timezone handling). */
  private getMarketSession(timestamp: number): string {
    const date = new Date(timestamp);
    const timeString = date.toTimeString().substring(0, 5);
    if (timeString < '09:30') return 'pre-market';
    if (timeString <= '16:00') return 'regular';
    if (timeString <= '20:00') return 'after-hours';
    return 'closed';
  }

  /** Replace the config; if it carries pipelines, rebuild the pipeline map. */
  public async updateConfig(config: any): Promise<void> {
    this.config = config;
    this.logger.info('Processing engine configuration updated');
    // Update pipelines if provided
    if (config.pipelines) {
      // Clear existing pipelines
      this.pipelines.clear();
      // Add new pipelines
      for (const pipeline of config.pipelines) {
        this.addPipeline(pipeline);
      }
    }
  }
}

View file

@ -1,540 +0,0 @@
import { EventEmitter } from 'events';
import axios, { AxiosInstance } from 'axios';
import {
ServiceIntegration,
Logger,
HealthStatus,
MarketDataTick,
MarketDataCandle,
ProcessedData,
DataPipelineJob,
FeatureComputationRequest,
DataAsset
} from '../types/MarketDataGateway';
/** Connection settings for one downstream service. */
interface ServiceEndpoint {
  baseUrl: string;
  timeout: number; // request timeout in ms
  retries: number;
  healthPath: string; // path probed for health checks
}
/** Last-known health snapshot for an integrated service. */
interface ServiceHealth {
  serviceId: string;
  status: 'healthy' | 'degraded' | 'unhealthy' | 'unreachable';
  lastCheck: number; // epoch ms of the last probe (0 = never checked)
  responseTime: number; // ms
  errorCount: number;
}
/** Per-service request counters and timing. */
interface IntegrationMetrics {
  totalRequests: number;
  successfulRequests: number;
  failedRequests: number;
  averageResponseTime: number; // ms
  lastRequestTime: number; // epoch ms of the most recent request
}
export class ServiceIntegrationManager extends EventEmitter {
private config: ServiceIntegration;
private logger: Logger;
private httpClients: Map<string, AxiosInstance>;
private serviceHealth: Map<string, ServiceHealth>;
private integrationMetrics: Map<string, IntegrationMetrics>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private isInitialized: boolean = false;
constructor(config: ServiceIntegration, logger: Logger) {
super();
this.config = config;
this.logger = logger;
this.httpClients = new Map();
this.serviceHealth = new Map();
this.integrationMetrics = new Map();
}
async initialize(): Promise<void> {
try {
// Initialize HTTP clients for each service
const services = [
{ id: 'data-processor', config: this.config.dataProcessor },
{ id: 'feature-store', config: this.config.featureStore },
{ id: 'data-catalog', config: this.config.dataCatalog },
];
for (const service of services) {
if (service.config.enabled) {
const client = axios.create({
baseURL: service.config.baseUrl,
timeout: service.config.timeout || 30000,
headers: {
'Content-Type': 'application/json',
'User-Agent': 'market-data-gateway/1.0.0',
},
});
// Add request interceptor for metrics
client.interceptors.request.use((config) => {
const startTime = Date.now();
config.metadata = { startTime };
return config;
});
// Add response interceptor for metrics and error handling
client.interceptors.response.use(
(response) => {
const endTime = Date.now();
const startTime = response.config.metadata?.startTime || endTime;
this.updateMetrics(service.id, true, endTime - startTime);
return response;
},
(error) => {
const endTime = Date.now();
const startTime = error.config?.metadata?.startTime || endTime;
this.updateMetrics(service.id, false, endTime - startTime);
return Promise.reject(error);
}
);
this.httpClients.set(service.id, client);
// Initialize health tracking
this.serviceHealth.set(service.id, {
serviceId: service.id,
status: 'unreachable',
lastCheck: 0,
responseTime: 0,
errorCount: 0,
});
// Initialize metrics
this.integrationMetrics.set(service.id, {
totalRequests: 0,
successfulRequests: 0,
failedRequests: 0,
averageResponseTime: 0,
lastRequestTime: 0,
});
}
}
// Start health monitoring
this.startHealthMonitoring();
this.isInitialized = true;
this.logger.info('Service integration manager initialized successfully');
this.emit('initialized');
} catch (error) {
this.logger.error('Failed to initialize service integration manager:', error);
throw error;
}
}
async shutdown(): Promise<void> {
try {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
}
this.isInitialized = false;
this.logger.info('Service integration manager shut down successfully');
this.emit('shutdown');
} catch (error) {
this.logger.error('Error shutting down service integration manager:', error);
}
}
// Data Processor Integration
/**
 * Pushes a batch of processed records to the data-processor service.
 * No-op when the integration is disabled. Emits 'data-sent' on success;
 * on failure emits 'integration-error' and rethrows.
 *
 * @param data batch of records to ingest
 */
async sendToDataProcessor(data: ProcessedData[]): Promise<void> {
  if (!this.config.dataProcessor.enabled) {
    this.logger.debug('Data processor integration disabled');
    return;
  }
  try {
    const client = this.httpClients.get('data-processor');
    if (!client) throw new Error('Data processor client not initialized');
    const payload = {
      source: 'market-data-gateway',
      timestamp: new Date().toISOString(),
      data: data,
    };
    await client.post('/api/v1/data/ingest', payload);
    this.logger.debug(`Sent ${data.length} records to data processor`);
    this.emit('data-sent', { service: 'data-processor', count: data.length });
    // Fix: the method is declared Promise<void>, yet the old code returned
    // response.data here — a value leaked through a void contract. Dropped.
  } catch (error) {
    this.logger.error('Failed to send data to data processor:', error);
    this.emit('integration-error', { service: 'data-processor', error });
    throw error;
  }
}
/**
 * Registers a new pipeline definition with the data-processor service.
 * @returns the id assigned to the created pipeline
 * @throws when the integration is disabled or the HTTP call fails
 */
async createDataPipeline(pipelineConfig: any): Promise<string> {
  if (!this.config.dataProcessor.enabled) {
    throw new Error('Data processor integration disabled');
  }
  try {
    const client = this.httpClients.get('data-processor');
    if (!client) throw new Error('Data processor client not initialized');
    const { data: created } = await client.post('/api/v1/pipelines', pipelineConfig);
    this.logger.info(`Created data pipeline: ${created.id}`);
    return created.id;
  } catch (error) {
    this.logger.error('Failed to create data pipeline:', error);
    throw error;
  }
}
/**
 * Starts a job run on an existing data-processor pipeline.
 * @returns the id of the job that was started
 * @throws when the integration is disabled or the request fails
 */
async triggerPipelineJob(pipelineId: string, jobConfig: Partial<DataPipelineJob>): Promise<string> {
  if (!this.config.dataProcessor.enabled) {
    throw new Error('Data processor integration disabled');
  }
  try {
    const client = this.httpClients.get('data-processor');
    if (!client) throw new Error('Data processor client not initialized');
    const { data: job } = await client.post(`/api/v1/pipelines/${pipelineId}/jobs`, jobConfig);
    this.logger.info(`Triggered pipeline job: ${job.jobId}`);
    return job.jobId;
  } catch (error) {
    this.logger.error('Failed to trigger pipeline job:', error);
    throw error;
  }
}
// Feature Store Integration
/**
 * Publishes computed features to the feature store. No-op when disabled.
 * Emits 'features-published' on success; on failure emits
 * 'integration-error' and rethrows.
 *
 * @param features feature records to ingest
 */
async publishToFeatureStore(features: any[]): Promise<void> {
  if (!this.config.featureStore.enabled) {
    this.logger.debug('Feature store integration disabled');
    return;
  }
  try {
    const client = this.httpClients.get('feature-store');
    if (!client) throw new Error('Feature store client not initialized');
    const payload = {
      source: 'market-data-gateway',
      timestamp: new Date().toISOString(),
      features: features,
    };
    await client.post('/api/v1/features/ingest', payload);
    this.logger.debug(`Published ${features.length} features to feature store`);
    this.emit('features-published', { count: features.length });
    // Fix: the old code returned response.data despite the Promise<void>
    // signature; the stray value is no longer leaked to callers.
  } catch (error) {
    this.logger.error('Failed to publish features to feature store:', error);
    this.emit('integration-error', { service: 'feature-store', error });
    throw error;
  }
}
/**
 * Asks the feature store to compute a feature group on demand.
 * @returns the feature store's computation response
 * @throws when the integration is disabled or the request fails
 */
async requestFeatureComputation(request: FeatureComputationRequest): Promise<any> {
  if (!this.config.featureStore.enabled) {
    throw new Error('Feature store integration disabled');
  }
  try {
    const client = this.httpClients.get('feature-store');
    if (!client) throw new Error('Feature store client not initialized');
    const { data: result } = await client.post('/api/v1/features/compute', request);
    this.logger.info(`Requested feature computation: ${request.featureGroupId}`);
    return result;
  } catch (error) {
    this.logger.error('Failed to request feature computation:', error);
    throw error;
  }
}
/** Fetches a feature-group definition from the feature store by id. */
async getFeatureGroup(featureGroupId: string): Promise<any> {
  if (!this.config.featureStore.enabled) {
    throw new Error('Feature store integration disabled');
  }
  try {
    const client = this.httpClients.get('feature-store');
    if (!client) throw new Error('Feature store client not initialized');
    const { data: group } = await client.get(`/api/v1/feature-groups/${featureGroupId}`);
    return group;
  } catch (error) {
    this.logger.error(`Failed to get feature group ${featureGroupId}:`, error);
    throw error;
  }
}
// Data Catalog Integration
/**
 * Registers a new asset with the data catalog.
 * @returns the catalog-assigned asset id, or '' when the integration is disabled
 */
async registerDataAsset(asset: Omit<DataAsset, 'id' | 'createdAt' | 'updatedAt'>): Promise<string> {
  if (!this.config.dataCatalog.enabled) {
    this.logger.debug('Data catalog integration disabled');
    return '';
  }
  try {
    const client = this.httpClients.get('data-catalog');
    if (!client) throw new Error('Data catalog client not initialized');
    const { data: created } = await client.post('/api/v1/assets', asset);
    this.logger.info(`Registered data asset: ${asset.name}`);
    this.emit('asset-registered', { assetId: created.id, name: asset.name });
    return created.id;
  } catch (error) {
    this.logger.error('Failed to register data asset:', error);
    this.emit('integration-error', { service: 'data-catalog', error });
    throw error;
  }
}
/**
 * Records a lineage edge (fromAssetId -> toAssetId) in the data catalog.
 * No-op when the integration is disabled; emits 'lineage-updated' on success.
 */
async updateDataLineage(fromAssetId: string, toAssetId: string, transformationType: string): Promise<void> {
  if (!this.config.dataCatalog.enabled) {
    this.logger.debug('Data catalog integration disabled');
    return;
  }
  try {
    const client = this.httpClients.get('data-catalog');
    if (!client) throw new Error('Data catalog client not initialized');
    const edge = {
      fromAssetId,
      toAssetId,
      transformationType,
      timestamp: new Date().toISOString(),
      source: 'market-data-gateway',
    };
    await client.post('/api/v1/lineage', edge);
    this.logger.debug(`Updated data lineage: ${fromAssetId} -> ${toAssetId}`);
    this.emit('lineage-updated', edge);
  } catch (error) {
    this.logger.error('Failed to update data lineage:', error);
    this.emit('integration-error', { service: 'data-catalog', error });
    throw error;
  }
}
/**
 * Submits a data-quality report for an asset to the data catalog.
 * No-op when the integration is disabled; emits 'quality-reported' on success.
 */
async reportDataQuality(assetId: string, qualityMetrics: any): Promise<void> {
  if (!this.config.dataCatalog.enabled) {
    this.logger.debug('Data catalog integration disabled');
    return;
  }
  try {
    const client = this.httpClients.get('data-catalog');
    if (!client) throw new Error('Data catalog client not initialized');
    const report = {
      assetId,
      metrics: qualityMetrics,
      timestamp: new Date().toISOString(),
      source: 'market-data-gateway',
    };
    await client.post('/api/v1/quality/reports', report);
    this.logger.debug(`Reported data quality for asset: ${assetId}`);
    this.emit('quality-reported', { assetId, metrics: qualityMetrics });
  } catch (error) {
    this.logger.error('Failed to report data quality:', error);
    this.emit('integration-error', { service: 'data-catalog', error });
    throw error;
  }
}
// Health monitoring
/** Begins polling every integrated service's /health endpoint on a fixed cadence. */
private startHealthMonitoring(): void {
  const HEALTH_CHECK_PERIOD_MS = 30000; // check every 30 seconds
  this.healthCheckInterval = setInterval(
    () => void this.checkServiceHealth(),
    HEALTH_CHECK_PERIOD_MS
  );
}
private async checkServiceHealth(): Promise<void> {
const healthPromises = Array.from(this.httpClients.entries()).map(
async ([serviceId, client]) => {
const startTime = Date.now();
try {
await client.get('/health');
const responseTime = Date.now() - startTime;
this.updateServiceHealth(serviceId, 'healthy', responseTime, false);
} catch (error) {
const responseTime = Date.now() - startTime;
this.updateServiceHealth(serviceId, 'unhealthy', responseTime, true);
}
}
);
await Promise.allSettled(healthPromises);
}
/**
 * Writes the latest probe result into the per-service health record and
 * notifies listeners. Unknown service ids are ignored.
 */
private updateServiceHealth(
  serviceId: string,
  status: ServiceHealth['status'],
  responseTime: number,
  isError: boolean
): void {
  const entry = this.serviceHealth.get(serviceId);
  if (!entry) return;
  entry.status = status;
  entry.lastCheck = Date.now();
  entry.responseTime = responseTime;
  // Errors bump the counter; successes decay it back toward zero.
  entry.errorCount = isError ? entry.errorCount + 1 : Math.max(0, entry.errorCount - 1);
  this.serviceHealth.set(serviceId, entry);
  this.emit('service-health-updated', { serviceId, health: entry });
}
/**
 * Records one request outcome into the per-service metrics.
 *
 * @param serviceId   service the request targeted; unknown ids are ignored
 * @param success     whether the HTTP call succeeded
 * @param responseTime observed latency in milliseconds
 */
private updateMetrics(serviceId: string, success: boolean, responseTime: number): void {
  const metrics = this.integrationMetrics.get(serviceId);
  if (!metrics) return;
  metrics.totalRequests++;
  metrics.lastRequestTime = Date.now();
  if (success) {
    metrics.successfulRequests++;
    // Incremental mean over SUCCESSFUL requests only. The previous code
    // recomputed this mean on failures too, folding failed-request latency
    // into the "successful" average and corrupting it.
    const n = metrics.successfulRequests;
    metrics.averageResponseTime =
      (metrics.averageResponseTime * (n - 1) + responseTime) / n;
  } else {
    metrics.failedRequests++;
  }
  this.integrationMetrics.set(serviceId, metrics);
}
// Status and metrics
/**
 * Returns the health record for one service, or all records when no id is
 * given. Unknown services report as 'unreachable' with a zeroed record.
 */
getServiceHealth(serviceId?: string): ServiceHealth | ServiceHealth[] {
  if (!serviceId) {
    return [...this.serviceHealth.values()];
  }
  const known = this.serviceHealth.get(serviceId);
  if (known) return known;
  return { serviceId, status: 'unreachable', lastCheck: 0, responseTime: 0, errorCount: 0 };
}
/**
 * Returns request metrics for one service, or all services when no id is
 * given. Unknown services report an all-zero record.
 */
getIntegrationMetrics(serviceId?: string): IntegrationMetrics | IntegrationMetrics[] {
  if (!serviceId) {
    return [...this.integrationMetrics.values()];
  }
  const known = this.integrationMetrics.get(serviceId);
  if (known) return known;
  return {
    totalRequests: 0,
    successfulRequests: 0,
    failedRequests: 0,
    averageResponseTime: 0,
    lastRequestTime: 0,
  };
}
/**
 * Aggregates per-service health into one overall status report:
 * 'unhealthy' if any service is unhealthy/unreachable, else 'degraded' if
 * any is degraded, else 'healthy'.
 */
getHealth(): HealthStatus {
  // Snapshot once instead of re-scanning the map repeatedly; the old code
  // also computed an `allHealthy` flag that was never used — removed.
  const all = Array.from(this.serviceHealth.values());
  const healthyCount = all.filter(h => h.status === 'healthy').length;
  const degradedServices = all.filter(h => h.status === 'degraded');
  const unhealthyServices = all.filter(
    h => h.status === 'unhealthy' || h.status === 'unreachable'
  );
  let status: 'healthy' | 'degraded' | 'unhealthy' = 'healthy';
  let message = 'All service integrations are healthy';
  if (unhealthyServices.length > 0) {
    status = 'unhealthy';
    message = `${unhealthyServices.length} services are unhealthy`;
  } else if (degradedServices.length > 0) {
    status = 'degraded';
    message = `${degradedServices.length} services are degraded`;
  }
  return {
    status,
    message,
    timestamp: new Date().toISOString(),
    details: {
      isInitialized: this.isInitialized,
      totalServices: this.serviceHealth.size,
      healthyServices: healthyCount,
      degradedServices: degradedServices.length,
      unhealthyServices: unhealthyServices.length,
      serviceHealth: Object.fromEntries(this.serviceHealth),
      integrationMetrics: Object.fromEntries(this.integrationMetrics),
    },
  };
}
// Configuration management
/**
 * Applies a partial configuration update to a known service and, when the
 * base URL changed, patches the live HTTP client in place.
 */
updateServiceConfig(serviceId: string, config: Partial<ServiceEndpoint>): void {
  const target = this.getServiceConfig(serviceId);
  if (!target) {
    this.logger.error(`Service ${serviceId} not found for config update`);
    return;
  }
  Object.assign(target, config);
  const { baseUrl, timeout } = config;
  if (baseUrl) {
    const client = this.httpClients.get(serviceId);
    if (client) {
      client.defaults.baseURL = baseUrl;
      client.defaults.timeout = timeout || client.defaults.timeout;
    }
  }
  this.logger.info(`Updated configuration for service: ${serviceId}`);
  this.emit('service-config-updated', { serviceId, config });
}
/** Maps a well-known service id onto its config section; null for unknown ids. */
private getServiceConfig(serviceId: string): any {
  const sections: Record<string, any> = {
    'data-processor': this.config.dataProcessor,
    'feature-store': this.config.featureStore,
    'data-catalog': this.config.dataCatalog,
  };
  return serviceId in sections ? sections[serviceId] : null;
}
}

View file

@ -1,617 +0,0 @@
import { EventEmitter } from 'eventemitter3';
import { Logger } from 'pino';
import WebSocket from 'ws';
import {
SubscriptionRequest,
ClientSubscription,
WebSocketMessage,
WebSocketDataMessage,
MarketDataTick,
MarketDataCandle,
MarketDataTrade
} from '../types/MarketDataGateway';
/** Book-keeping record for one connected WebSocket consumer. */
interface WebSocketClient {
  id: string;                 // client id, provided when the socket is registered
  ws: WebSocket;              // the live socket
  subscriptions: Set<string>; // subscription ids owned by this client
  connectedAt: Date;
  lastPing: Date;             // last heartbeat/pong seen; drives timeout eviction
  metadata: {
    userAgent?: string;
    ip?: string;
    userId?: string;
  };
}
/**
 * Manages market-data subscriptions and their delivery to clients.
 *
 * Responsibilities:
 *  - subscription lifecycle (subscribe/unsubscribe, validation, stale cleanup)
 *  - fan-out of ticks/candles/trades to matching subscriptions (broadcast)
 *  - per-subscription filtering, throttling, and delivery metrics
 *  - WebSocket client management (heartbeat, timeout eviction, message protocol)
 */
export class SubscriptionManager extends EventEmitter {
  private logger: Logger;
  // Active subscriptions keyed by subscription id.
  private subscriptions: Map<string, ClientSubscription> = new Map();
  // Connected WebSocket clients keyed by client id.
  private clients: Map<string, WebSocketClient> = new Map();
  private symbolSubscriptions: Map<string, Set<string>> = new Map(); // symbol -> subscription IDs
  private heartbeatInterval?: NodeJS.Timeout;
  private cleanupInterval?: NodeJS.Timeout;

  constructor(logger: Logger) {
    super();
    this.logger = logger;
  }

  /** Starts the heartbeat and stale-subscription cleanup timers. */
  public async start(): Promise<void> {
    this.logger.info('Starting Subscription Manager');
    // Start heartbeat for WebSocket clients
    this.startHeartbeat();
    // Start cleanup for stale subscriptions
    this.startCleanup();
    this.logger.info('Subscription Manager started');
  }

  /** Stops timers, closes every client socket, and clears all state. */
  public async stop(): Promise<void> {
    this.logger.info('Stopping Subscription Manager');
    // Clear intervals
    if (this.heartbeatInterval) {
      clearInterval(this.heartbeatInterval);
    }
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
    }
    // Close all WebSocket connections
    for (const client of this.clients.values()) {
      client.ws.close();
    }
    this.clients.clear();
    this.subscriptions.clear();
    this.symbolSubscriptions.clear();
    this.logger.info('Subscription Manager stopped');
  }

  /**
   * Validates and registers a new subscription; indexes it per symbol for
   * fast broadcast lookup. Emits 'subscribed'.
   *
   * @returns the subscription id (same as request.id)
   * @throws on invalid requests or duplicate ids (see validateSubscriptionRequest)
   */
  public async subscribe(request: SubscriptionRequest): Promise<string> {
    this.logger.info({
      clientId: request.clientId,
      symbols: request.symbols,
      dataTypes: request.dataTypes
    }, 'Creating subscription');
    // Validate subscription request
    this.validateSubscriptionRequest(request);
    // Create subscription
    const subscription: ClientSubscription = {
      request,
      status: 'active',
      connectedAt: new Date(),
      lastUpdate: new Date(),
      metrics: {
        messagesDelivered: 0,
        bytesTransferred: 0,
        errors: 0,
        avgLatencyMs: 0
      }
    };
    this.subscriptions.set(request.id, subscription);
    // Track symbol subscriptions for efficient lookup
    for (const symbol of request.symbols) {
      if (!this.symbolSubscriptions.has(symbol)) {
        this.symbolSubscriptions.set(symbol, new Set());
      }
      this.symbolSubscriptions.get(symbol)!.add(request.id);
    }
    this.emit('subscribed', subscription);
    this.logger.info({ subscriptionId: request.id }, 'Subscription created');
    return request.id;
  }

  /**
   * Removes a subscription and its per-symbol index entries.
   * Emits 'unsubscribed' with the owning client id.
   * @throws when the subscription id is unknown
   */
  public async unsubscribe(subscriptionId: string): Promise<void> {
    const subscription = this.subscriptions.get(subscriptionId);
    if (!subscription) {
      throw new Error(`Subscription ${subscriptionId} not found`);
    }
    this.logger.info({ subscriptionId }, 'Removing subscription');
    // Remove from symbol tracking
    for (const symbol of subscription.request.symbols) {
      const symbolSubs = this.symbolSubscriptions.get(symbol);
      if (symbolSubs) {
        symbolSubs.delete(subscriptionId);
        if (symbolSubs.size === 0) {
          this.symbolSubscriptions.delete(symbol);
        }
      }
    }
    // Remove subscription
    this.subscriptions.delete(subscriptionId);
    this.emit('unsubscribed', subscription.request.clientId);
    this.logger.info({ subscriptionId }, 'Subscription removed');
  }

  /** Lists all subscriptions, optionally filtered to one client. */
  public getSubscriptions(clientId?: string): ClientSubscription[] {
    const subscriptions = Array.from(this.subscriptions.values());
    if (clientId) {
      return subscriptions.filter(sub => sub.request.clientId === clientId);
    }
    return subscriptions;
  }

  /**
   * Fans one market-data record out to every active subscription that
   * tracks its symbol, wants its data type, passes filters, and is not
   * throttled. Delivery failures are handled per subscription.
   */
  public async broadcast(data: MarketDataTick | MarketDataCandle | MarketDataTrade): Promise<void> {
    const symbol = data.symbol;
    const dataType = this.getDataType(data);
    // Get subscriptions for this symbol
    const subscriptionIds = this.symbolSubscriptions.get(symbol);
    if (!subscriptionIds || subscriptionIds.size === 0) {
      return;
    }
    const deliveryPromises: Promise<void>[] = [];
    for (const subscriptionId of subscriptionIds) {
      const subscription = this.subscriptions.get(subscriptionId);
      if (!subscription || subscription.status !== 'active') {
        continue;
      }
      // Check if subscription wants this data type
      if (!subscription.request.dataTypes.includes(dataType as any)) {
        continue;
      }
      // Apply filters
      if (!this.passesFilters(data, subscription.request.filters)) {
        continue;
      }
      // Apply throttling if configured
      if (subscription.request.throttle && !this.passesThrottle(subscription)) {
        continue;
      }
      // Deliver data based on delivery method
      deliveryPromises.push(this.deliverData(subscription, data));
    }
    // Wait for all deliveries
    await Promise.allSettled(deliveryPromises);
  }

  /**
   * Registers a WebSocket, wires its message/close/error/pong handlers,
   * and sends a welcome status message.
   */
  public addWebSocketClient(ws: WebSocket, clientId: string, metadata: any = {}): void {
    this.logger.info({ clientId }, 'Adding WebSocket client');
    const client: WebSocketClient = {
      id: clientId,
      ws,
      subscriptions: new Set(),
      connectedAt: new Date(),
      lastPing: new Date(),
      metadata
    };
    this.clients.set(clientId, client);
    // Setup WebSocket event handlers
    ws.on('message', (message: Buffer) => {
      this.handleWebSocketMessage(clientId, message);
    });
    ws.on('close', () => {
      this.removeWebSocketClient(clientId);
    });
    ws.on('error', (error) => {
      this.logger.error({ clientId, error }, 'WebSocket client error');
      this.removeWebSocketClient(clientId);
    });
    ws.on('pong', () => {
      const client = this.clients.get(clientId);
      if (client) {
        client.lastPing = new Date();
      }
    });
    // Send welcome message
    this.sendWebSocketMessage(ws, {
      type: 'status',
      id: 'welcome',
      timestamp: Date.now(),
      payload: {
        status: 'connected',
        clientId,
        serverTime: new Date().toISOString()
      }
    });
  }

  /**
   * Drops a client: removes its subscriptions, closes the socket if still
   * open, and forgets the client record. Safe to call for unknown ids.
   */
  public removeWebSocketClient(clientId: string): void {
    const client = this.clients.get(clientId);
    if (!client) {
      return;
    }
    this.logger.info({ clientId }, 'Removing WebSocket client');
    // Unsubscribe from all subscriptions
    for (const subscriptionId of client.subscriptions) {
      try {
        // NOTE(review): unsubscribe() is async and not awaited here, so a
        // rejection would escape this try/catch — consider awaiting.
        this.unsubscribe(subscriptionId);
      } catch (error) {
        this.logger.error({ subscriptionId, error }, 'Error unsubscribing client');
      }
    }
    // Close WebSocket if still open
    if (client.ws.readyState === WebSocket.OPEN) {
      client.ws.close();
    }
    this.clients.delete(clientId);
  }

  /**
   * Rejects malformed subscription requests: missing ids, empty symbol or
   * data-type lists, duplicate subscription ids, or unknown data types.
   */
  private validateSubscriptionRequest(request: SubscriptionRequest): void {
    if (!request.id) {
      throw new Error('Subscription ID is required');
    }
    if (!request.clientId) {
      throw new Error('Client ID is required');
    }
    if (!request.symbols || request.symbols.length === 0) {
      throw new Error('At least one symbol is required');
    }
    if (!request.dataTypes || request.dataTypes.length === 0) {
      throw new Error('At least one data type is required');
    }
    if (this.subscriptions.has(request.id)) {
      throw new Error(`Subscription ${request.id} already exists`);
    }
    // Validate symbols format
    for (const symbol of request.symbols) {
      if (typeof symbol !== 'string' || symbol.length === 0) {
        throw new Error(`Invalid symbol: ${symbol}`);
      }
    }
    // Validate data types
    const validDataTypes = ['quotes', 'trades', 'orderbook', 'candles', 'news'];
    for (const dataType of request.dataTypes) {
      if (!validDataTypes.includes(dataType)) {
        throw new Error(`Invalid data type: ${dataType}`);
      }
    }
  }

  /**
   * Classifies a record by shape (duck typing). Checked in order: trades
   * (id+side), candles (open+high), quotes (price), orderbook (bids/asks).
   * NOTE(review): heuristic — a record with unexpected fields may misclassify.
   */
  private getDataType(data: any): string {
    if (data.id && data.side) return 'trades';
    if (data.open !== undefined && data.high !== undefined) return 'candles';
    if (data.price !== undefined) return 'quotes';
    if (data.bids || data.asks) return 'orderbook';
    return 'unknown';
  }

  /**
   * Applies a subscription's optional price-range, volume-threshold, and
   * exchange filters. Fields absent on the record skip that filter.
   */
  private passesFilters(data: any, filters?: any): boolean {
    if (!filters) {
      return true;
    }
    // Price range filter
    if (filters.priceRange && data.price) {
      if (data.price < filters.priceRange.min || data.price > filters.priceRange.max) {
        return false;
      }
    }
    // Volume threshold filter
    if (filters.volumeThreshold && data.volume) {
      if (data.volume < filters.volumeThreshold) {
        return false;
      }
    }
    // Exchange filter
    if (filters.exchanges && data.exchange) {
      if (!filters.exchanges.includes(data.exchange)) {
        return false;
      }
    }
    return true;
  }

  /**
   * Rate limit: allows delivery only when at least
   * 1000 / maxUpdatesPerSecond ms have elapsed since the last delivery.
   */
  private passesThrottle(subscription: ClientSubscription): boolean {
    const throttle = subscription.request.throttle;
    if (!throttle) {
      return true;
    }
    const now = Date.now();
    const timeSinceLastUpdate = now - subscription.lastUpdate.getTime();
    const minInterval = 1000 / throttle.maxUpdatesPerSecond;
    return timeSinceLastUpdate >= minInterval;
  }

  /**
   * Wraps a record in a data message and delivers it via the subscription's
   * configured transport; updates running latency/count metrics on success.
   * After more than 10 delivery errors the subscription is marked 'error'
   * and an 'error' event is emitted. Never rethrows.
   */
  private async deliverData(subscription: ClientSubscription, data: any): Promise<void> {
    const startTime = Date.now();
    try {
      const message: WebSocketDataMessage = {
        type: 'data',
        id: subscription.request.id,
        timestamp: Date.now(),
        payload: {
          dataType: this.getDataType(data),
          data
        }
      };
      switch (subscription.request.delivery.method) {
        case 'websocket':
          await this.deliverViaWebSocket(subscription, message);
          break;
        case 'webhook':
          await this.deliverViaWebhook(subscription, message);
          break;
        case 'eventbus':
          await this.deliverViaEventBus(subscription, message);
          break;
        default:
          throw new Error(`Unsupported delivery method: ${subscription.request.delivery.method}`);
      }
      // Update metrics: incremental mean of delivery latency.
      const latency = Date.now() - startTime;
      subscription.metrics.messagesDelivered++;
      subscription.metrics.avgLatencyMs =
        (subscription.metrics.avgLatencyMs * (subscription.metrics.messagesDelivered - 1) + latency) /
        subscription.metrics.messagesDelivered;
      subscription.lastUpdate = new Date();
    } catch (error) {
      this.logger.error({
        subscriptionId: subscription.request.id,
        error
      }, 'Error delivering data');
      subscription.metrics.errors++;
      if (subscription.metrics.errors > 10) {
        subscription.status = 'error';
        this.emit('error', error, subscription.request.clientId);
      }
    }
  }

  /** Sends a data message over the owning client's open socket; throws if unavailable. */
  private async deliverViaWebSocket(subscription: ClientSubscription, message: WebSocketDataMessage): Promise<void> {
    const client = this.clients.get(subscription.request.clientId);
    if (!client || client.ws.readyState !== WebSocket.OPEN) {
      throw new Error('WebSocket client not available');
    }
    this.sendWebSocketMessage(client.ws, message);
    const messageSize = JSON.stringify(message).length;
    subscription.metrics.bytesTransferred += messageSize;
  }

  /** Not implemented: always throws. */
  private async deliverViaWebhook(subscription: ClientSubscription, message: any): Promise<void> {
    // Webhook delivery implementation would go here
    // This would use HTTP POST to deliver the data
    throw new Error('Webhook delivery not implemented');
  }

  /** Emits 'event-bus-delivery' for an external publisher to forward. */
  private async deliverViaEventBus(subscription: ClientSubscription, message: any): Promise<void> {
    // Event bus delivery implementation would go here
    // This would publish to the event bus
    this.emit('event-bus-delivery', subscription.request.clientId, message);
  }

  /** JSON-serializes and sends a message; silently drops if the socket is not open. */
  private sendWebSocketMessage(ws: WebSocket, message: WebSocketMessage): void {
    if (ws.readyState === WebSocket.OPEN) {
      ws.send(JSON.stringify(message));
    }
  }

  /** Parses an inbound client frame and dispatches by message type. Parse errors are logged. */
  private handleWebSocketMessage(clientId: string, message: Buffer): void {
    try {
      const parsedMessage = JSON.parse(message.toString()) as WebSocketMessage;
      switch (parsedMessage.type) {
        case 'subscribe':
          this.handleWebSocketSubscribe(clientId, parsedMessage as any);
          break;
        case 'unsubscribe':
          this.handleWebSocketUnsubscribe(clientId, parsedMessage);
          break;
        case 'heartbeat':
          this.handleWebSocketHeartbeat(clientId);
          break;
        default:
          this.logger.warn({ clientId, messageType: parsedMessage.type }, 'Unknown WebSocket message type');
      }
    } catch (error) {
      this.logger.error({ clientId, error }, 'Error parsing WebSocket message');
    }
  }

  /**
   * Handles an inbound 'subscribe' frame: builds a websocket-delivery
   * subscription (id = "<clientId>-<now>"), registers it, and replies with a
   * status confirmation — or an error message if validation fails.
   */
  private async handleWebSocketSubscribe(clientId: string, message: any): Promise<void> {
    try {
      const subscriptionRequest: SubscriptionRequest = {
        id: `${clientId}-${Date.now()}`,
        clientId,
        symbols: message.payload.symbols,
        dataTypes: message.payload.dataTypes,
        filters: message.payload.filters,
        throttle: message.payload.throttle,
        delivery: {
          method: 'websocket',
          format: 'json'
        }
      };
      const subscriptionId = await this.subscribe(subscriptionRequest);
      const client = this.clients.get(clientId);
      if (client) {
        client.subscriptions.add(subscriptionId);
      }
      // Send confirmation
      const confirmationMessage: WebSocketMessage = {
        type: 'status',
        id: message.id,
        timestamp: Date.now(),
        payload: {
          status: 'subscribed',
          subscriptionId,
          symbols: subscriptionRequest.symbols,
          dataTypes: subscriptionRequest.dataTypes
        }
      };
      const ws = this.clients.get(clientId)?.ws;
      if (ws) {
        this.sendWebSocketMessage(ws, confirmationMessage);
      }
    } catch (error) {
      this.logger.error({ clientId, error }, 'Error handling WebSocket subscribe');
      // Send error message
      const errorMessage: WebSocketMessage = {
        type: 'error',
        id: message.id,
        timestamp: Date.now(),
        payload: {
          error: error instanceof Error ? error.message : 'Unknown error'
        }
      };
      const ws = this.clients.get(clientId)?.ws;
      if (ws) {
        this.sendWebSocketMessage(ws, errorMessage);
      }
    }
  }

  /**
   * Handles an inbound 'unsubscribe' frame: removes the subscription and
   * confirms. Failures (missing/unknown id) are logged; no error reply is sent.
   */
  private async handleWebSocketUnsubscribe(clientId: string, message: WebSocketMessage): Promise<void> {
    try {
      const subscriptionId = message.payload?.subscriptionId;
      if (!subscriptionId) {
        throw new Error('Subscription ID is required');
      }
      await this.unsubscribe(subscriptionId);
      const client = this.clients.get(clientId);
      if (client) {
        client.subscriptions.delete(subscriptionId);
      }
      // Send confirmation
      const confirmationMessage: WebSocketMessage = {
        type: 'status',
        id: message.id,
        timestamp: Date.now(),
        payload: {
          status: 'unsubscribed',
          subscriptionId
        }
      };
      const ws = this.clients.get(clientId)?.ws;
      if (ws) {
        this.sendWebSocketMessage(ws, confirmationMessage);
      }
    } catch (error) {
      this.logger.error({ clientId, error }, 'Error handling WebSocket unsubscribe');
    }
  }

  /** Refreshes the client's liveness timestamp and echoes a heartbeat with server time. */
  private handleWebSocketHeartbeat(clientId: string): void {
    const client = this.clients.get(clientId);
    if (client) {
      client.lastPing = new Date();
      const heartbeatMessage: WebSocketMessage = {
        type: 'heartbeat',
        timestamp: Date.now(),
        payload: {
          serverTime: new Date().toISOString()
        }
      };
      this.sendWebSocketMessage(client.ws, heartbeatMessage);
    }
  }

  /**
   * Every 30s: evicts clients silent for over 60s and pings the rest
   * (the 'pong' handler in addWebSocketClient refreshes lastPing).
   */
  private startHeartbeat(): void {
    this.heartbeatInterval = setInterval(() => {
      const now = Date.now();
      const timeout = 60000; // 60 seconds
      for (const [clientId, client] of this.clients.entries()) {
        const timeSinceLastPing = now - client.lastPing.getTime();
        if (timeSinceLastPing > timeout) {
          this.logger.warn({ clientId }, 'Client heartbeat timeout');
          this.removeWebSocketClient(clientId);
        } else if (client.ws.readyState === WebSocket.OPEN) {
          // Send ping
          client.ws.ping();
        }
      }
    }, 30000); // Check every 30 seconds
  }

  /**
   * Every minute: removes subscriptions in 'error' state or older than 24h.
   * NOTE(review): unsubscribe() is async and fire-and-forget here; a
   * rejection would surface as an unhandled rejection — consider awaiting.
   */
  private startCleanup(): void {
    this.cleanupInterval = setInterval(() => {
      const now = Date.now();
      const maxAge = 24 * 60 * 60 * 1000; // 24 hours
      for (const [subscriptionId, subscription] of this.subscriptions.entries()) {
        const age = now - subscription.connectedAt.getTime();
        if (subscription.status === 'error' || age > maxAge) {
          this.logger.info({ subscriptionId }, 'Cleaning up stale subscription');
          this.unsubscribe(subscriptionId);
        }
      }
    }, 60000); // Check every minute
  }

  /** Returns an aggregate snapshot of subscription/client/delivery counters. */
  public getMetrics() {
    return {
      totalSubscriptions: this.subscriptions.size,
      activeSubscriptions: Array.from(this.subscriptions.values())
        .filter(sub => sub.status === 'active').length,
      connectedClients: this.clients.size,
      symbolsTracked: this.symbolSubscriptions.size,
      totalMessagesDelivered: Array.from(this.subscriptions.values())
        .reduce((sum, sub) => sum + sub.metrics.messagesDelivered, 0),
      totalErrors: Array.from(this.subscriptions.values())
        .reduce((sum, sub) => sum + sub.metrics.errors, 0)
    };
  }
}

View file

@ -1,5 +0,0 @@
// Shared components used by both realtime and storage modules
export { CacheManager } from '../services/CacheManager';
export { DataNormalizer } from '../services/DataNormalizer';
export { MetricsCollector } from '../services/MetricsCollector';
export { ServiceIntegrationManager } from '../services/ServiceIntegrationManager';

View file

@ -1,52 +0,0 @@
/**
* Archival service for managing data lifecycle and storage tiers
* Handles cold storage, data compression, and retention policies
*/
export class ArchivalService {
private compressionLevel: number;
private retentionPolicies: Map<string, number>;
constructor() {
this.compressionLevel = 6; // Default compression level
this.retentionPolicies = new Map();
}
/**
* Archive old data to cold storage
*/
async archiveData(symbol: string, cutoffDate: Date): Promise<void> {
try {
console.log(`Archiving data for ${symbol} before ${cutoffDate}`);
// Implementation for archiving
} catch (error) {
console.error('Error archiving data:', error);
throw error;
}
}
/**
* Compress data for storage optimization
*/
async compressData(data: any[]): Promise<Buffer> {
try {
// Implementation for data compression
return Buffer.from(JSON.stringify(data));
} catch (error) {
console.error('Error compressing data:', error);
throw error;
}
}
/**
* Apply retention policies
*/
async applyRetentionPolicies(): Promise<void> {
try {
console.log('Applying retention policies...');
// Implementation for applying retention policies
} catch (error) {
console.error('Error applying retention policies:', error);
throw error;
}
}
}

View file

@ -1,46 +0,0 @@
import { TimeSeriesStorage } from './TimeSeriesStorage';
/**
 * Query engine for efficient historical data retrieval
 * Optimizes queries and provides various aggregation capabilities
 * (current implementation is a logging stub returning empty result sets).
 */
export class QueryEngine {
  // Parameter property replaces the explicit field + assignment.
  constructor(private storage: TimeSeriesStorage) {}

  /**
   * Execute optimized query with caching (stub: logs and returns []).
   */
  async executeQuery(queryParams: any): Promise<any> {
    try {
      console.log('Executing optimized query:', queryParams);
      return [];
    } catch (error) {
      console.error('Error executing query:', error);
      throw error;
    }
  }

  /**
   * Aggregate data by time intervals (stub: logs and returns []).
   */
  async aggregateByInterval(
    symbol: string,
    interval: string,
    startTime: Date,
    endTime: Date
  ): Promise<any[]> {
    try {
      console.log(`Aggregating ${symbol} by ${interval}`);
      return [];
    } catch (error) {
      console.error('Error aggregating data:', error);
      throw error;
    }
  }
}

View file

@ -1,78 +0,0 @@
import { CacheManager } from '../services/CacheManager';
import { DataNormalizer } from '../services/DataNormalizer';
import { MetricsCollector } from '../services/MetricsCollector';
/**
 * Historical data storage and retrieval service
 * Handles time-series storage, archival, and query capabilities
 * (methods are currently logging stubs that only record metrics).
 */
export class TimeSeriesStorage {
  // Parameter properties replace the explicit fields + assignments.
  constructor(
    private cache: CacheManager,
    private normalizer: DataNormalizer,
    private metrics: MetricsCollector
  ) {}

  /**
   * Store historical market data (stub: logs and bumps a counter).
   */
  async storeHistoricalData(symbol: string, data: any[]): Promise<void> {
    try {
      console.log(`Storing historical data for ${symbol}:`, data.length, 'records');
      await this.metrics.incrementCounter('historical_data_stored', { symbol });
    } catch (error) {
      console.error('Error storing historical data:', error);
      throw error;
    }
  }

  /**
   * Query historical data by time range (stub: logs, counts, returns []).
   */
  async queryTimeRange(
    symbol: string,
    startTime: Date,
    endTime: Date,
    interval?: string
  ): Promise<any[]> {
    try {
      console.log(`Querying ${symbol} from ${startTime} to ${endTime}`);
      await this.metrics.incrementCounter('historical_query', { symbol });
      // Return mock data for now
      return [];
    } catch (error) {
      console.error('Error querying historical data:', error);
      throw error;
    }
  }

  /**
   * Get data statistics and metadata (stub: returns an empty summary).
   */
  async getDataStats(symbol: string): Promise<any> {
    try {
      return {
        symbol,
        recordCount: 0,
        firstRecord: null,
        lastRecord: null,
        intervals: []
      };
    } catch (error) {
      console.error('Error getting data stats:', error);
      throw error;
    }
  }
}

View file

@ -1,4 +0,0 @@
// Storage and historical data components
export { TimeSeriesStorage } from './TimeSeriesStorage';
export { QueryEngine } from './QueryEngine';
export { ArchivalService } from './ArchivalService';

View file

@ -1,426 +0,0 @@
// Market Data Gateway Types - Consolidated and organized
// Market Data Types
/** One top-of-book / last-trade snapshot for a symbol. */
export interface MarketDataTick {
  symbol: string;
  timestamp: number;      // epoch time; presumably milliseconds — TODO confirm against feed adapters
  price: number;          // last trade price
  volume: number;
  bid?: number;
  ask?: number;
  bidSize?: number;
  askSize?: number;
  source: string;         // originating data-source id
  exchange?: string;
  lastTradeSize?: number;
  dayHigh?: number;
  dayLow?: number;
  dayOpen?: number;
  prevClose?: number;
  change?: number;        // presumably price - prevClose — verify against producers
  changePercent?: number;
}
/** One OHLCV bar for a symbol at a given timeframe. */
export interface MarketDataCandle {
  symbol: string;
  timestamp: number;  // epoch time; presumably the bar's open time — TODO confirm
  open: number;
  high: number;
  low: number;
  close: number;
  volume: number;
  timeframe: string;  // bar size label (format set by producers, e.g. '1m' — verify)
  source: string;     // originating data-source id
  exchange?: string;
  vwap?: number;      // volume-weighted average price, when provided
  trades?: number;    // trade count within the bar, when provided
}
/** A single executed trade print. */
export interface MarketDataTrade {
  id: string;             // trade id; also used for duck-typed classification downstream
  symbol: string;
  timestamp: number;      // epoch time; presumably milliseconds — TODO confirm
  price: number;
  size: number;
  side: 'buy' | 'sell';   // aggressor side
  source: string;         // originating data-source id
  exchange?: string;
  conditions?: string[];  // venue-specific trade condition codes, when provided
}
/** An order-book entry / order event from a data source. */
export interface MarketDataOrder {
  id: string;
  symbol: string;
  timestamp: number;  // epoch time; presumably milliseconds — TODO confirm
  side: 'buy' | 'sell';
  price: number;
  size: number;
  source: string;     // originating data-source id
  exchange?: string;
  orderType?: 'market' | 'limit' | 'stop';
  level?: number;     // presumably order-book depth level — verify against producers
}
// Data Source Configuration
/** Full configuration for one upstream market-data source connection. */
export interface DataSourceConfig {
  id: string;       // unique source id
  name: string;     // human-readable name
  type: 'websocket' | 'rest' | 'fix' | 'stream';
  provider: string; // vendor/provider name
  enabled: boolean;
  priority: number; // NOTE(review): ordering semantics (higher vs lower wins) not visible here — confirm
  // Client-side rate limiting for requests to this source.
  rateLimit: {
    requestsPerSecond: number;
    burstLimit: number;
  };
  // Endpoint and credential details.
  connection: {
    url: string;
    headers?: Record<string, string>;
    queryParams?: Record<string, string>;
    authentication?: {
      type: 'apikey' | 'oauth' | 'basic' | 'jwt';
      credentials: Record<string, string>;
    };
  };
  // Which data channels to subscribe to on this source.
  subscriptions: {
    quotes: boolean;
    trades: boolean;
    orderbook: boolean;
    candles: boolean;
    news: boolean;
  };
  symbols: string[]; // symbols to request from this source
  // Reconnect/backoff behavior on failure.
  retryPolicy: {
    maxRetries: number;
    backoffMultiplier: number;
    maxBackoffMs: number;
  };
  // Periodic liveness probing of the source.
  healthCheck: {
    intervalMs: number;
    timeoutMs: number;
    expectedLatencyMs: number;
  };
}
// Data Processing Pipeline

/** A single stage in a processing pipeline; stages are run by priority order — TODO confirm ordering semantics. */
export interface DataProcessor {
  id: string;
  name: string;
  type: 'enrichment' | 'validation' | 'normalization' | 'aggregation' | 'filter';
  enabled: boolean;
  priority: number;
  config: Record<string, any>;
  /** Transforms one inbound market-data record; result shape is processor-specific. */
  process(data: MarketDataTick | MarketDataCandle | MarketDataTrade): Promise<any>;
}

/** An ordered chain of processors plus input filtering and output routing. */
export interface ProcessingPipeline {
  id: string;
  name: string;
  processors: DataProcessor[];
  /** Only records matching every provided filter enter the pipeline — TODO confirm AND semantics. */
  inputFilter: {
    symbols?: string[];
    sources?: string[];
    dataTypes?: string[];
  };
  /** Destinations that receive the pipeline's output when enabled. */
  outputTargets: {
    eventBus?: boolean;
    database?: boolean;
    cache?: boolean;
    websocket?: boolean;
    dataProcessor?: boolean;
    featureStore?: boolean;
  };
}

// ProcessingPipelineConfig is an alias for ProcessingPipeline
export type ProcessingPipelineConfig = ProcessingPipeline;
// Subscription Management

/** A client's request to receive streaming market data, with optional filtering and throttling. */
export interface SubscriptionRequest {
  id: string;
  clientId: string;
  symbols: string[];
  dataTypes: ('quotes' | 'trades' | 'orderbook' | 'candles' | 'news')[];
  filters?: {
    priceRange?: { min: number; max: number };
    volumeThreshold?: number;
    exchanges?: string[];
  };
  throttle?: {
    maxUpdatesPerSecond: number;
    /** Window over which updates are coalesced — presumably ms; TODO confirm unit. */
    aggregationWindow?: number;
  };
  delivery: {
    method: 'websocket' | 'webhook' | 'eventbus';
    /** Required for webhook delivery; unused otherwise — TODO confirm. */
    endpoint?: string;
    format: 'json' | 'protobuf' | 'avro';
  };
}

/** Server-side state and delivery metrics for one active subscription. */
export interface ClientSubscription {
  request: SubscriptionRequest;
  status: 'active' | 'paused' | 'error' | 'stopped';
  connectedAt: Date;
  lastUpdate: Date;
  metrics: {
    messagesDelivered: number;
    bytesTransferred: number;
    errors: number;
    avgLatencyMs: number;
  };
}
// Gateway Configuration

/** Top-level runtime configuration for the Market Data Gateway service. */
export interface GatewayConfig {
  server: {
    port: number;
    host: string;
    maxConnections: number;
    cors: {
      origins: string[];
      methods: string[];
      headers: string[];
    };
  };
  dataSources: DataSourceConfig[];
  processing: {
    pipelines: ProcessingPipeline[];
    bufferSize: number;
    batchSize: number;
    flushIntervalMs: number;
  };
  cache: {
    redis: {
      host: string;
      port: number;
      password?: string;
      db: number;
    };
    // Per-data-type cache TTLs — presumably seconds; TODO confirm against cache layer.
    ttl: {
      quotes: number;
      trades: number;
      candles: number;
      orderbook: number;
    };
  };
  monitoring: {
    metrics: {
      enabled: boolean;
      port: number;
      intervalMs: number;
      retention: string;
    };
    alerts: {
      enabled: boolean;
      thresholds: {
        errorRate: number;
        // NOTE(review): both `latency` and `latencyMs` are declared — one is
        // likely legacy; confirm which consumers read before removing either.
        latency: number;
        latencyMs: number;
        connectionLoss: number;
      };
    };
  };
}
// Metrics and Monitoring

/** Connection and throughput metrics for one upstream data source. */
export interface DataSourceMetrics {
  sourceId: string;
  status: 'connected' | 'disconnected' | 'error';
  messagesReceived: number;
  bytesReceived: number;
  latencyMs: number;
  errorCount: number;
  lastUpdate: Date;
}

/** Point-in-time snapshot of gateway-wide health and throughput. */
export interface GatewayMetrics {
  timestamp: Date;
  /** Service uptime — presumably seconds; TODO confirm producer. */
  uptime: number;
  system: {
    cpuUsage: number;
    memoryUsage: number;
    diskUsage: number;
    networkIO: {
      bytesIn: number;
      bytesOut: number;
    };
  };
  dataSources: DataSourceMetrics[];
  subscriptions: {
    total: number;
    active: number;
    byDataType: Record<string, number>;
  };
  processing: {
    messagesPerSecond: number;
    avgProcessingTimeMs: number;
    queueDepth: number;
    errorRate: number;
  };
}

// Health Check

/** Standard health-check payload returned by the gateway's health endpoint. */
export interface HealthStatus {
  service: string;
  status: 'healthy' | 'degraded' | 'unhealthy';
  timestamp: Date;
  uptime: number;
  version: string;
  /** Per-dependency health, e.g. cache, event bus, upstream feeds. */
  dependencies: {
    name: string;
    status: 'healthy' | 'unhealthy';
    latencyMs?: number;
    error?: string;
  }[];
  metrics: {
    connectionsActive: number;
    messagesPerSecond: number;
    errorRate: number;
    avgLatencyMs: number;
  };
}
// WebSocket Types

/** Envelope for every message exchanged over the gateway WebSocket. */
export interface WebSocketMessage {
  type: string;
  payload: any;
  timestamp: number;
  id?: string;
}

/** Client → server message requesting a data subscription. */
export interface WebSocketSubscribeMessage extends WebSocketMessage {
  type: 'subscribe';
  payload: SubscriptionRequest;
}

/** Server → client message carrying a market-data record. */
export interface WebSocketDataMessage extends WebSocketMessage {
  type: 'data';
  payload: MarketDataTick | MarketDataTrade | MarketDataCandle | MarketDataOrder;
  dataType?: string;
}

// Error Types

/** Structured error raised by a data-source connection or its data handling. */
export interface DataSourceError {
  sourceId: string;
  timestamp: Date;
  type: 'connection' | 'authentication' | 'ratelimit' | 'data' | 'timeout';
  message: string;
  details?: Record<string, any>;
  severity: 'low' | 'medium' | 'high' | 'critical';
  /** Whether the gateway may retry/recover automatically. */
  recoverable: boolean;
}

// Event Types

/** Event-bus envelope for market data emitted by this gateway. */
export interface MarketDataEvent {
  id: string;
  type: 'market.tick' | 'market.trade' | 'market.candle' | 'market.orderbook';
  source: 'market-data-gateway';
  timestamp: Date;
  data: MarketDataTick | MarketDataTrade | MarketDataCandle | MarketDataOrder;
  metadata?: Record<string, any>;
}
// Processing and Integration Types

/** Error raised while processing a record inside the gateway pipeline. */
export interface ProcessingError {
  code: string;
  message: string;
  timestamp: Date;
  data?: any;
  source?: string;
}

/**
 * Connectivity settings for downstream services the gateway pushes data to
 * (data processor, feature store, data catalog).
 */
export interface ServiceIntegration {
  serviceName: string;
  endpoint: string;
  enabled: boolean;
  config: Record<string, any>;
  dataProcessor: {
    enabled: boolean;
    endpoint: string;
    // Timeout — presumably ms; TODO confirm against the HTTP client in use.
    timeout: number;
    retries: number;
  };
  featureStore: {
    enabled: boolean;
    endpoint: string;
    timeout: number;
    retries: number;
  };
  dataCatalog: {
    enabled: boolean;
    endpoint: string;
    timeout: number;
    retries: number;
  };
}

/** Minimal structural logging contract used across the gateway. */
export interface Logger {
  info(message: string, ...args: any[]): void;
  error(message: string, ...args: any[]): void;
  warn(message: string, ...args: any[]): void;
  debug(message: string, ...args: any[]): void;
}

/** A record after pipeline processing, with provenance timestamps. */
export interface ProcessedData {
  source: string;
  timestamp: Date;
  data: any;
  processedAt: Date;
  metadata?: Record<string, any>;
}

/** A unit of asynchronous pipeline work and its lifecycle status. */
export interface DataPipelineJob {
  id: string;
  type: string;
  status: 'pending' | 'running' | 'completed' | 'failed';
  data: any;
  createdAt: Date;
  startedAt?: Date;
  completedAt?: Date;
}

/** Request to the feature store to compute features from a payload. */
export interface FeatureComputationRequest {
  featureGroupId: string;
  features: string[];
  data: any;
  timestamp: Date;
  metadata?: Record<string, any>;
}

/** Catalog registration for a data asset produced/consumed by the gateway. */
export interface DataAsset {
  id: string;
  name: string;
  type: string;
  source: string;
  metadata: Record<string, any>;
  createdAt: Date;
  updatedAt: Date;
}
// Add missing types

/** Redis cache connection settings plus per-data-type TTLs (mirrors GatewayConfig.cache). */
export interface CacheConfig {
  redis: {
    host: string;
    port: number;
    password?: string;
    db: number;
  };
  // TTLs — presumably seconds; TODO confirm against cache layer.
  ttl: {
    quotes: number;
    trades: number;
    candles: number;
    orderbook: number;
  };
}

/** Aggregate pipeline throughput/latency counters. */
export interface ProcessingMetrics {
  totalProcessed: number;
  processedPerSecond: number;
  processingLatency: number;
  errorCount: number;
}

/** Aggregate subscription delivery counters. */
export interface SubscriptionMetrics {
  totalSubscriptions: number;
  messagesSent: number;
  sendRate: number;
}

View file

@ -1,32 +0,0 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"module": "ESNext",
"moduleResolution": "bundler",
"types": ["bun-types"],
"baseUrl": "../../../",
"paths": {
"@stock-bot/*": ["libs/*/src", "libs/*/dist"]
},
"rootDir": "../../../"
},
"include": [
"src/**/*",
"../../../libs/*/src/**/*"
],
"exclude": [
"node_modules",
"dist",
"../../../libs/*/examples/**/*",
"../../../libs/**/*.test.ts",
"../../../libs/**/*.spec.ts"
],
"references": [
{ "path": "../../../libs/config" },
{ "path": "../../../libs/types" },
{ "path": "../../../libs/logger" },
{ "path": "../../../libs/http-client" },
{ "path": "../../../libs/event-bus" }
]
}

View file

@ -1,22 +0,0 @@
{
"name": "risk-guardian",
"version": "1.0.0",
"description": "Real-time risk monitoring and controls service",
"main": "src/index.ts",
"scripts": {
"dev": "bun run --watch src/index.ts",
"start": "bun run src/index.ts",
"test": "echo 'No tests yet'"
},
"dependencies": {
"hono": "^4.6.3",
"ioredis": "^5.4.1",
"@stock-bot/config": "*",
"@stock-bot/types": "*",
"ws": "^8.18.0"
},
"devDependencies": {
"bun-types": "^1.2.15",
"@types/ws": "^8.5.12"
}
}

View file

@ -1,245 +0,0 @@
import { Hono } from 'hono';
import { WebSocketServer } from 'ws';
import Redis from 'ioredis';
const app = new Hono();

// Shared Redis client used by the HTTP handlers below (threshold hash, TTL'd
// evaluation records, key listing).
// NOTE(review): any pub/sub SUBSCRIBE must use a duplicate() of this
// connection — an ioredis connection in subscriber mode can no longer issue
// regular commands, which would break every handler that uses this client.
const redis = new Redis({
  host: process.env.REDIS_HOST || 'localhost',
  port: parseInt(process.env.REDIS_PORT || '6379'),
  enableReadyCheck: false,
  maxRetriesPerRequest: null,
});

// WebSocket server for real-time risk alerts
const wss = new WebSocketServer({ port: 8081 });

// Risk thresholds configuration
// Limits that positions/portfolios must stay within before alerts fire.
interface RiskThresholds {
  maxPositionSize: number;   // max notional value of one position (USD)
  maxDailyLoss: number;      // max daily loss (USD)
  maxPortfolioRisk: number;  // max fraction of portfolio value in one position
  volatilityLimit: number;   // max tolerated volatility (fraction)
}

// Fallback used whenever no overrides are stored in the Redis hash.
const defaultThresholds: RiskThresholds = {
  maxPositionSize: 100000, // $100k max position
  maxDailyLoss: 10000, // $10k max daily loss
  maxPortfolioRisk: 0.02, // 2% portfolio risk
  volatilityLimit: 0.3 // 30% volatility limit
};
// Liveness probe: reports service identity, version, and the number of
// currently connected WebSocket monitoring clients.
app.get('/health', (c) =>
  c.json({
    service: 'risk-guardian',
    status: 'healthy',
    timestamp: new Date(),
    version: '1.0.0',
    connections: wss.clients.size,
  })
);
// Returns the active risk thresholds: overrides persisted in the Redis hash
// when present, otherwise the compiled-in defaults.
app.get('/api/risk/thresholds', async (c) => {
  try {
    const stored = await redis.hgetall('risk:thresholds');
    const hasOverrides = Object.keys(stored).length > 0;
    // Redis hash values are strings; convert each field back to a number.
    const active = hasOverrides
      ? Object.fromEntries(
          Object.entries(stored).map(([field, raw]) => [field, parseFloat(raw as string)])
        )
      : defaultThresholds;
    return c.json({
      success: true,
      data: active
    });
  } catch (error) {
    console.error('Error fetching risk thresholds:', error);
    return c.json({ success: false, error: 'Failed to fetch thresholds' }, 500);
  }
});
// Update risk thresholds
// Persists the posted thresholds to the Redis hash and pushes the update to
// every connected monitoring client over WebSocket.
app.put('/api/risk/thresholds', async (c) => {
  try {
    const thresholds = await c.req.json();
    // HMSET has been deprecated since Redis 4.0; HSET accepts the same
    // field-map form and is the supported replacement.
    await redis.hset('risk:thresholds', thresholds);
    // Broadcast threshold update to connected clients
    const message = JSON.stringify({
      type: 'THRESHOLD_UPDATE',
      data: thresholds,
      timestamp: new Date()
    });
    wss.clients.forEach(client => {
      if (client.readyState === 1) { // WebSocket.OPEN
        client.send(message);
      }
    });
    return c.json({ success: true, data: thresholds });
  } catch (error) {
    console.error('Error updating risk thresholds:', error);
    return c.json({ success: false, error: 'Failed to update thresholds' }, 500);
  }
});
// Real-time risk monitoring endpoint
// Evaluates a proposed position against the active thresholds, stores the
// result (1h TTL), and broadcasts a WebSocket alert when risk is HIGH.
app.post('/api/risk/evaluate', async (c) => {
  try {
    const { symbol, quantity, price, portfolioValue } = await c.req.json();
    // Validate up front: without this, a missing/zero portfolioValue makes
    // positionRisk NaN or Infinity and the endpoint silently returns a
    // meaningless "LOW" evaluation.
    if (
      typeof symbol !== 'string' ||
      ![quantity, price, portfolioValue].every(
        (v) => typeof v === 'number' && Number.isFinite(v)
      ) ||
      portfolioValue <= 0
    ) {
      return c.json({ success: false, error: 'Invalid risk evaluation request' }, 400);
    }
    const thresholds = await redis.hgetall('risk:thresholds');
    const activeThresholds = Object.keys(thresholds).length > 0
      ? Object.fromEntries(
          Object.entries(thresholds).map(([k, v]) => [k, parseFloat(v as string)])
        )
      : defaultThresholds;
    const positionValue = quantity * price;
    const positionRisk = positionValue / portfolioValue; // fraction of portfolio
    const riskEvaluation = {
      symbol,
      positionValue,
      positionRisk,
      violations: [] as string[],
      riskLevel: 'LOW' as 'LOW' | 'MEDIUM' | 'HIGH'
    };
    // Check risk violations
    if (positionValue > activeThresholds.maxPositionSize) {
      riskEvaluation.violations.push(`Position size exceeds limit: $${positionValue.toLocaleString()}`);
    }
    if (positionRisk > activeThresholds.maxPortfolioRisk) {
      riskEvaluation.violations.push(`Portfolio risk exceeds limit: ${(positionRisk * 100).toFixed(2)}%`);
    }
    // Determine risk level: any violation is HIGH; within 70% of the
    // portfolio-risk limit is MEDIUM; otherwise LOW.
    if (riskEvaluation.violations.length > 0) {
      riskEvaluation.riskLevel = 'HIGH';
    } else if (positionRisk > activeThresholds.maxPortfolioRisk * 0.7) {
      riskEvaluation.riskLevel = 'MEDIUM';
    }
    // Store risk evaluation
    await redis.setex(
      `risk:evaluation:${symbol}:${Date.now()}`,
      3600, // 1 hour TTL
      JSON.stringify(riskEvaluation)
    );
    // Send real-time alert if high risk
    if (riskEvaluation.riskLevel === 'HIGH') {
      const alert = {
        type: 'RISK_ALERT',
        level: 'HIGH',
        data: riskEvaluation,
        timestamp: new Date()
      };
      wss.clients.forEach(client => {
        if (client.readyState === 1) { // WebSocket.OPEN
          client.send(JSON.stringify(alert));
        }
      });
    }
    return c.json({ success: true, data: riskEvaluation });
  } catch (error) {
    console.error('Error evaluating risk:', error);
    return c.json({ success: false, error: 'Failed to evaluate risk' }, 500);
  }
});
// Get risk history
// Returns up to 100 stored risk evaluations, newest first.
// NOTE(review): KEYS is O(N) and blocks Redis; prefer SCAN or a sorted-set
// index in production.
app.get('/api/risk/history', async (c) => {
  try {
    const keys = await redis.keys('risk:evaluation:*');
    const recentKeys = keys.slice(0, 100); // limit to 100 recent evaluations
    // One MGET round trip instead of a sequential GET per key.
    const evaluations: any[] = recentKeys.length > 0
      ? (await redis.mget(...recentKeys))
          .filter((raw): raw is string => raw !== null) // keys may expire mid-request
          .map((raw) => JSON.parse(raw))
      : [];
    return c.json({
      success: true,
      data: evaluations.sort((a: any, b: any) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime())
    });
  } catch (error) {
    console.error('Error fetching risk history:', error);
    return c.json({ success: false, error: 'Failed to fetch risk history' }, 500);
  }
});
// WebSocket lifecycle handling for risk-monitoring dashboard clients:
// greet on connect, log disconnects and socket errors.
wss.on('connection', (socket) => {
  console.log('New risk monitoring client connected');
  const welcome = {
    type: 'CONNECTED',
    message: 'Connected to Risk Guardian',
    timestamp: new Date()
  };
  socket.send(JSON.stringify(welcome));
  socket.on('close', () => console.log('Risk monitoring client disconnected'));
  socket.on('error', (err) => console.error('WebSocket error:', err));
});
// Redis event subscriptions for cross-service communication.
// A dedicated duplicated connection is required: once a connection issues
// SUBSCRIBE it enters subscriber mode and can no longer run regular commands,
// so reusing the shared `redis` client would break every HTTP handler above.
const subscriber = redis.duplicate();
subscriber.subscribe('trading:position:opened', 'trading:position:closed');
subscriber.on('message', async (channel, message) => {
  try {
    const data = JSON.parse(message);
    if (channel === 'trading:position:opened') {
      // Auto-evaluate risk for new positions
      const evaluation = await evaluatePositionRisk(data);
      // Broadcast to connected clients
      wss.clients.forEach(client => {
        if (client.readyState === 1) { // WebSocket.OPEN
          client.send(JSON.stringify({
            type: 'POSITION_RISK_UPDATE',
            data: evaluation,
            timestamp: new Date()
          }));
        }
      });
    }
  } catch (error) {
    console.error('Error processing Redis message:', error);
  }
});
// Placeholder risk evaluation for a newly opened position.
// TODO: evaluate against the configured thresholds instead of always LOW.
async function evaluatePositionRisk(position: any) {
  const { symbol } = position;
  const evaluation = {
    symbol,
    riskLevel: 'LOW',
    timestamp: new Date()
  };
  return evaluation;
}
// Service entry point: Bun's HTTP server reads this default export and routes
// requests through Hono's fetch handler.
const port = parseInt(process.env.PORT || '3002');
console.log(`🛡️ Risk Guardian starting on port ${port}`);
console.log(`📡 WebSocket server running on port 8081`);
export default {
  port,
  fetch: app.fetch,
};

View file

@ -1,12 +0,0 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"module": "ESNext",
"moduleResolution": "bundler",
"types": ["bun-types"]
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View file

@ -1,40 +0,0 @@
{
"name": "@stock-bot/data-catalog",
"version": "1.0.0",
"private": true,
"description": "Data catalog and discovery service for stock-bot",
"type": "module",
"main": "dist/index.js",
"scripts": {
"dev": "bun run --hot src/index.ts",
"build": "tsc",
"start": "node dist/index.js",
"test": "bun test",
"type-check": "tsc --noEmit"
},
"dependencies": {
"@stock-bot/types": "workspace:*",
"@stock-bot/utils": "workspace:*",
"@stock-bot/event-bus": "workspace:*",
"@stock-bot/api-client": "workspace:*",
"hono": "^4.0.0",
"zod": "^3.22.0",
"elasticsearch": "^16.7.3",
"neo4j-driver": "^5.15.0",
"cron": "^3.1.6",
"uuid": "^9.0.1"
},
"devDependencies": {
"@types/uuid": "^9.0.8",
"@types/cron": "^2.4.0",
"@types/node": "^20.0.0",
"typescript": "^5.3.0"
},
"keywords": [
"data-catalog",
"data-discovery",
"data-lineage",
"metadata",
"stock-bot"
]
}

View file

@ -1,360 +0,0 @@
import { Context } from 'hono';
import { Logger } from '@stock-bot/utils';
import { DataCatalogService } from '../services/DataCatalogService';
import {
CreateDataAssetRequest,
UpdateDataAssetRequest,
DataAssetType,
DataClassification
} from '../types/DataCatalog';
/**
 * HTTP controller for data-catalog assets: CRUD, search, filtered listings,
 * per-asset metrics, and catalog-wide statistics. Thin layer over
 * DataCatalogService — validates input and maps results/errors to JSON.
 */
export class DataCatalogController {
  constructor(
    private dataCatalogService: DataCatalogService,
    private logger: Logger
  ) {}

  /** Creates an asset. 400 when required fields are missing. */
  async createAsset(c: Context) {
    try {
      const request: CreateDataAssetRequest = await c.req.json();
      // Validate required fields
      if (!request.name || !request.type || !request.description || !request.owner) {
        return c.json({ error: 'Missing required fields: name, type, description, owner' }, 400);
      }
      const asset = await this.dataCatalogService.createAsset(request);
      this.logger.info('Asset created via API', {
        assetId: asset.id,
        name: asset.name,
        type: asset.type
      });
      return c.json(asset, 201);
    } catch (error) {
      this.logger.error('Failed to create asset', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Fetches a single asset by id. 404 when unknown. */
  async getAsset(c: Context) {
    try {
      const assetId = c.req.param('id');
      if (!assetId) {
        return c.json({ error: 'Asset ID is required' }, 400);
      }
      const asset = await this.dataCatalogService.getAsset(assetId);
      if (!asset) {
        return c.json({ error: 'Asset not found' }, 404);
      }
      return c.json(asset);
    } catch (error) {
      this.logger.error('Failed to get asset', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Applies a partial update to an asset. 404 when unknown. */
  async updateAsset(c: Context) {
    try {
      const assetId = c.req.param('id');
      const updates: UpdateDataAssetRequest = await c.req.json();
      if (!assetId) {
        return c.json({ error: 'Asset ID is required' }, 400);
      }
      const asset = await this.dataCatalogService.updateAsset(assetId, updates);
      if (!asset) {
        return c.json({ error: 'Asset not found' }, 404);
      }
      this.logger.info('Asset updated via API', {
        assetId,
        changes: Object.keys(updates)
      });
      return c.json(asset);
    } catch (error) {
      this.logger.error('Failed to update asset', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Deletes an asset; maps the service's "not found" error to 404. */
  async deleteAsset(c: Context) {
    try {
      const assetId = c.req.param('id');
      if (!assetId) {
        return c.json({ error: 'Asset ID is required' }, 400);
      }
      await this.dataCatalogService.deleteAsset(assetId);
      this.logger.info('Asset deleted via API', { assetId });
      return c.json({ message: 'Asset deleted successfully' });
    } catch (error) {
      this.logger.error('Failed to delete asset', { error });
      if (error instanceof Error && error.message.includes('not found')) {
        return c.json({ error: 'Asset not found' }, 404);
      }
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Lists assets, optionally filtered by type/owner/classification/tags. */
  async listAssets(c: Context) {
    try {
      const query = c.req.query();
      const filters: Record<string, any> = {};
      // Parse query parameters
      if (query.type) filters.type = query.type;
      if (query.owner) filters.owner = query.owner;
      if (query.classification) filters.classification = query.classification;
      // c.req.query() returns one string per key, so repeated ?tags=a&tags=b
      // parameters were previously collapsed to a single value; queries()
      // returns every occurrence.
      const tags = c.req.queries('tags');
      if (tags && tags.length > 0) {
        filters.tags = tags;
      }
      const assets = await this.dataCatalogService.listAssets(filters);
      return c.json({
        assets,
        total: assets.length,
        filters: filters
      });
    } catch (error) {
      this.logger.error('Failed to list assets', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Full-text search over assets with optional type/owner/classification filters. */
  async searchAssets(c: Context) {
    try {
      const query = c.req.query('q');
      const queryParams = c.req.query();
      if (!query) {
        return c.json({ error: 'Search query is required' }, 400);
      }
      const filters: Record<string, any> = {};
      if (queryParams.type) filters.type = queryParams.type;
      if (queryParams.owner) filters.owner = queryParams.owner;
      if (queryParams.classification) filters.classification = queryParams.classification;
      const assets = await this.dataCatalogService.searchAssets(query, filters);
      this.logger.info('Asset search performed', {
        query,
        filters,
        resultCount: assets.length
      });
      return c.json({
        assets,
        total: assets.length,
        query,
        filters
      });
    } catch (error) {
      this.logger.error('Failed to search assets', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Lists assets belonging to one owner. */
  async getAssetsByOwner(c: Context) {
    try {
      const owner = c.req.param('owner');
      if (!owner) {
        return c.json({ error: 'Owner is required' }, 400);
      }
      const assets = await this.dataCatalogService.getAssetsByOwner(owner);
      return c.json({
        assets,
        total: assets.length,
        owner
      });
    } catch (error) {
      this.logger.error('Failed to get assets by owner', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Lists assets of one DataAssetType; 400 on values outside the enum. */
  async getAssetsByType(c: Context) {
    try {
      const type = c.req.param('type') as DataAssetType;
      if (!type) {
        return c.json({ error: 'Asset type is required' }, 400);
      }
      if (!Object.values(DataAssetType).includes(type)) {
        return c.json({ error: 'Invalid asset type' }, 400);
      }
      const assets = await this.dataCatalogService.getAssetsByType(type);
      return c.json({
        assets,
        total: assets.length,
        type
      });
    } catch (error) {
      this.logger.error('Failed to get assets by type', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Lists assets with one DataClassification; 400 on values outside the enum. */
  async getAssetsByClassification(c: Context) {
    try {
      const classification = c.req.param('classification') as DataClassification;
      if (!classification) {
        return c.json({ error: 'Classification is required' }, 400);
      }
      if (!Object.values(DataClassification).includes(classification)) {
        return c.json({ error: 'Invalid classification' }, 400);
      }
      const assets = await this.dataCatalogService.getAssetsByClassification(classification);
      return c.json({
        assets,
        total: assets.length,
        classification
      });
    } catch (error) {
      this.logger.error('Failed to get assets by classification', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Lists assets carrying any of the given tags (repeated ?tags= params). */
  async getAssetsByTags(c: Context) {
    try {
      // queries() captures every repeated ?tags= occurrence; query() would
      // return only one string, so multi-tag requests were previously lost.
      const tags = c.req.queries('tags') ?? [];
      if (tags.length === 0) {
        return c.json({ error: 'Tags parameter is required' }, 400);
      }
      const assets = await this.dataCatalogService.getAssetsByTags(tags);
      return c.json({
        assets,
        total: assets.length,
        tags
      });
    } catch (error) {
      this.logger.error('Failed to get assets by tags', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Builds a usage/quality/governance/lineage summary for one asset. */
  async getAssetMetrics(c: Context) {
    try {
      const assetId = c.req.param('id');
      if (!assetId) {
        return c.json({ error: 'Asset ID is required' }, 400);
      }
      const asset = await this.dataCatalogService.getAsset(assetId);
      if (!asset) {
        return c.json({ error: 'Asset not found' }, 404);
      }
      const metrics = {
        id: asset.id,
        name: asset.name,
        type: asset.type,
        classification: asset.classification,
        usage: {
          accessCount: asset.usage.accessCount,
          uniqueUsers: asset.usage.uniqueUsers,
          lastAccessed: asset.usage.lastAccessed,
          usageTrend: asset.usage.usageTrend
        },
        quality: {
          overallScore: asset.quality.overallScore,
          lastAssessment: asset.quality.lastAssessment,
          // Only count issues still open.
          issueCount: asset.quality.issues.filter(issue => !issue.resolved).length
        },
        governance: {
          policiesApplied: asset.governance.policies.length,
          // Compliant only when every check passed.
          complianceStatus: asset.governance.compliance.every(c => c.status === 'passed') ? 'compliant' : 'non-compliant',
          auditEntries: asset.governance.audit.length
        },
        lineage: {
          upstreamCount: asset.lineage.upstreamAssets.length,
          downstreamCount: asset.lineage.downstreamAssets.length
        }
      };
      return c.json(metrics);
    } catch (error) {
      this.logger.error('Failed to get asset metrics', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Catalog-wide statistics: counts by type/classification/owner, recency, popularity. */
  async getCatalogStatistics(c: Context) {
    try {
      const allAssets = await this.dataCatalogService.listAssets();
      const statistics = {
        totalAssets: allAssets.length,
        assetsByType: this.groupByProperty(allAssets, 'type'),
        assetsByClassification: this.groupByProperty(allAssets, 'classification'),
        assetsByOwner: this.groupByProperty(allAssets, 'owner'),
        recentAssets: allAssets
          .sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime())
          .slice(0, 10)
          .map(asset => ({
            id: asset.id,
            name: asset.name,
            type: asset.type,
            owner: asset.owner,
            createdAt: asset.createdAt
          })),
        mostAccessed: allAssets
          .sort((a, b) => b.usage.accessCount - a.usage.accessCount)
          .slice(0, 10)
          .map(asset => ({
            id: asset.id,
            name: asset.name,
            type: asset.type,
            accessCount: asset.usage.accessCount,
            lastAccessed: asset.usage.lastAccessed
          }))
      };
      return c.json(statistics);
    } catch (error) {
      this.logger.error('Failed to get catalog statistics', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Counts assets grouped by the value of one property. */
  private groupByProperty(assets: any[], property: string): Record<string, number> {
    return assets.reduce((acc, asset) => {
      const value = asset[property];
      acc[value] = (acc[value] || 0) + 1;
      return acc;
    }, {});
  }
}

View file

@ -1,414 +0,0 @@
import { Hono } from 'hono';
import { DataGovernanceService } from '../services/DataGovernanceService';
import {
GovernancePolicy,
ComplianceCheck,
AccessRequest,
DataSubjectRequest,
AuditLog
} from '../types/DataCatalog';
export class GovernanceController {
private app: Hono;
private governanceService: DataGovernanceService;
constructor() {
this.app = new Hono();
this.governanceService = new DataGovernanceService();
this.setupRoutes();
}
private setupRoutes() {
// Create governance policy
this.app.post('/policies', async (c) => {
try {
const policy: Omit<GovernancePolicy, 'id' | 'createdAt' | 'updatedAt'> = await c.req.json();
const createdPolicy = await this.governanceService.createPolicy(policy);
return c.json({
success: true,
data: createdPolicy
});
} catch (error) {
console.error('Error creating governance policy:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Get governance policies
this.app.get('/policies', async (c) => {
try {
const type = c.req.query('type');
const category = c.req.query('category');
const active = c.req.query('active') === 'true';
const filters: any = {};
if (type) filters.type = type;
if (category) filters.category = category;
if (active !== undefined) filters.active = active;
const policies = await this.governanceService.getPolicies(filters);
return c.json({
success: true,
data: policies
});
} catch (error) {
console.error('Error getting governance policies:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Update governance policy
this.app.put('/policies/:policyId', async (c) => {
try {
const policyId = c.req.param('policyId');
const updates: Partial<GovernancePolicy> = await c.req.json();
const updatedPolicy = await this.governanceService.updatePolicy(policyId, updates);
return c.json({
success: true,
data: updatedPolicy
});
} catch (error) {
console.error('Error updating governance policy:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Delete governance policy
this.app.delete('/policies/:policyId', async (c) => {
try {
const policyId = c.req.param('policyId');
await this.governanceService.deletePolicy(policyId);
return c.json({
success: true,
message: 'Governance policy deleted successfully'
});
} catch (error) {
console.error('Error deleting governance policy:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Apply policy to asset
this.app.post('/policies/:policyId/apply/:assetId', async (c) => {
try {
const policyId = c.req.param('policyId');
const assetId = c.req.param('assetId');
await this.governanceService.applyPolicy(policyId, assetId);
return c.json({
success: true,
message: 'Policy applied successfully'
});
} catch (error) {
console.error('Error applying policy:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Check compliance for asset
this.app.post('/compliance/check', async (c) => {
try {
const request: { assetId: string; policyIds?: string[] } = await c.req.json();
const complianceResult = await this.governanceService.checkCompliance(
request.assetId,
request.policyIds
);
return c.json({
success: true,
data: complianceResult
});
} catch (error) {
console.error('Error checking compliance:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Get compliance violations
this.app.get('/compliance/violations', async (c) => {
try {
const assetId = c.req.query('assetId');
const severity = c.req.query('severity');
const status = c.req.query('status');
const limit = c.req.query('limit') ? parseInt(c.req.query('limit')!) : 100;
const offset = c.req.query('offset') ? parseInt(c.req.query('offset')!) : 0;
const filters: any = {};
if (assetId) filters.assetId = assetId;
if (severity) filters.severity = severity;
if (status) filters.status = status;
const violations = await this.governanceService.getComplianceViolations(
filters,
{ limit, offset }
);
return c.json({
success: true,
data: violations
});
} catch (error) {
console.error('Error getting compliance violations:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Request access to asset
this.app.post('/access/request', async (c) => {
try {
const request: Omit<AccessRequest, 'id' | 'requestedAt' | 'status'> = await c.req.json();
const accessRequest = await this.governanceService.requestAccess(request);
return c.json({
success: true,
data: accessRequest
});
} catch (error) {
console.error('Error requesting access:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Approve/deny access request
this.app.patch('/access/:requestId', async (c) => {
try {
const requestId = c.req.param('requestId');
const { action, reviewedBy, reviewComments } = await c.req.json();
const updatedRequest = await this.governanceService.reviewAccessRequest(
requestId,
action,
reviewedBy,
reviewComments
);
return c.json({
success: true,
data: updatedRequest
});
} catch (error) {
console.error('Error reviewing access request:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Check access authorization
this.app.post('/access/check', async (c) => {
try {
const { userId, assetId, action } = await c.req.json();
const authorized = await this.governanceService.checkAccess(userId, assetId, action);
return c.json({
success: true,
data: {
userId,
assetId,
action,
authorized
}
});
} catch (error) {
console.error('Error checking access:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Handle data subject request (GDPR)
this.app.post('/privacy/subject-request', async (c) => {
try {
const request: Omit<DataSubjectRequest, 'id' | 'submittedAt' | 'status'> = await c.req.json();
const subjectRequest = await this.governanceService.handleDataSubjectRequest(request);
return c.json({
success: true,
data: subjectRequest
});
} catch (error) {
console.error('Error handling data subject request:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Anonymize asset data
this.app.post('/privacy/anonymize/:assetId', async (c) => {
try {
const assetId = c.req.param('assetId');
const { fields, method, requestedBy } = await c.req.json();
const result = await this.governanceService.anonymizeData(
assetId,
fields,
method,
requestedBy
);
return c.json({
success: true,
data: result
});
} catch (error) {
console.error('Error anonymizing data:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Get audit logs
this.app.get('/audit/logs', async (c) => {
try {
const assetId = c.req.query('assetId');
const userId = c.req.query('userId');
const action = c.req.query('action');
const startDate = c.req.query('startDate');
const endDate = c.req.query('endDate');
const limit = c.req.query('limit') ? parseInt(c.req.query('limit')!) : 100;
const offset = c.req.query('offset') ? parseInt(c.req.query('offset')!) : 0;
const filters: any = {};
if (assetId) filters.assetId = assetId;
if (userId) filters.userId = userId;
if (action) filters.action = action;
if (startDate) filters.startDate = new Date(startDate);
if (endDate) filters.endDate = new Date(endDate);
const logs = await this.governanceService.getAuditLogs(filters, { limit, offset });
return c.json({
success: true,
data: logs
});
} catch (error) {
console.error('Error getting audit logs:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Log access event
this.app.post('/audit/log', async (c) => {
try {
const logEntry: Omit<AuditLog, 'id' | 'timestamp'> = await c.req.json();
const logged = await this.governanceService.logAccess(logEntry);
return c.json({
success: true,
data: logged
});
} catch (error) {
console.error('Error logging access event:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Get retention policies
this.app.get('/retention/policies', async (c) => {
try {
const assetType = c.req.query('assetType');
const policies = await this.governanceService.getRetentionPolicies(assetType);
return c.json({
success: true,
data: policies
});
} catch (error) {
console.error('Error getting retention policies:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Apply retention policy
this.app.post('/retention/apply', async (c) => {
try {
const { assetId, policyId, requestedBy } = await c.req.json();
const result = await this.governanceService.applyRetentionPolicy(
assetId,
policyId,
requestedBy
);
return c.json({
success: true,
data: result
});
} catch (error) {
console.error('Error applying retention policy:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
// Get governance metrics
this.app.get('/metrics', async (c) => {
try {
const timeRange = c.req.query('timeRange') || '30d';
const metrics = await this.governanceService.getGovernanceMetrics(timeRange);
return c.json({
success: true,
data: metrics
});
} catch (error) {
console.error('Error getting governance metrics:', error);
return c.json({
success: false,
error: error instanceof Error ? error.message : 'Unknown error'
}, 500);
}
});
}
/** Returns the Hono sub-app with all governance routes registered, so the root router can mount it. */
public getApp(): Hono {
  return this.app;
}
}

View file

@ -1,172 +0,0 @@
import { Hono } from 'hono';
/**
 * Health endpoints for the data-catalog service, following the usual
 * probe conventions:
 *   GET /          – basic check (healthy whenever the process responds)
 *   GET /detailed  – process stats plus dependency probes; 503 when degraded
 *   GET /ready     – readiness gate (database + search reachable)
 *   GET /live      – liveness (process is running)
 */
export class HealthController {
  private app: Hono;

  constructor() {
    this.app = new Hono();
    this.setupRoutes();
  }

  /** Registers all health routes on the internal Hono sub-app. */
  private setupRoutes() {
    // Basic health check
    this.app.get('/', async (c) => {
      return c.json({
        service: 'data-catalog',
        status: 'healthy',
        timestamp: new Date().toISOString(),
        version: process.env.SERVICE_VERSION || '1.0.0'
      });
    });

    // Detailed health check
    this.app.get('/detailed', async (c) => {
      try {
        // The three dependency probes are independent, so run them in
        // parallel: endpoint latency is bounded by the slowest probe
        // instead of the sum of all three (previously sequential awaits).
        const [database, search, eventBus] = await Promise.all([
          this.checkDatabase(),
          this.checkSearchService(),
          this.checkEventBus()
        ]);

        const healthStatus = {
          service: 'data-catalog',
          status: 'healthy',
          timestamp: new Date().toISOString(),
          version: process.env.SERVICE_VERSION || '1.0.0',
          uptime: process.uptime(),
          memory: process.memoryUsage(),
          dependencies: { database, search, eventBus }
        };

        // Determine overall status based on dependencies: any unhealthy
        // dependency downgrades the service to 'degraded' and the HTTP
        // status to 503 so orchestrators can react.
        const hasUnhealthyDependencies = Object.values(healthStatus.dependencies)
          .some(dep => dep.status !== 'healthy');
        if (hasUnhealthyDependencies) {
          healthStatus.status = 'degraded';
        }

        const statusCode = healthStatus.status === 'healthy' ? 200 : 503;
        return c.json(healthStatus, statusCode);
      } catch (error) {
        console.error('Health check error:', error);
        return c.json({
          service: 'data-catalog',
          status: 'unhealthy',
          timestamp: new Date().toISOString(),
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 503);
      }
    });

    // Readiness check — only the database and search probes gate
    // readiness here; the event bus is not part of this check.
    this.app.get('/ready', async (c) => {
      try {
        const readyChecks = await Promise.all([
          this.checkDatabase(),
          this.checkSearchService()
        ]);

        const isReady = readyChecks.every(check => check.status === 'healthy');
        if (isReady) {
          return c.json({
            service: 'data-catalog',
            ready: true,
            timestamp: new Date().toISOString()
          });
        } else {
          return c.json({
            service: 'data-catalog',
            ready: false,
            timestamp: new Date().toISOString(),
            checks: readyChecks
          }, 503);
        }
      } catch (error) {
        console.error('Readiness check error:', error);
        return c.json({
          service: 'data-catalog',
          ready: false,
          timestamp: new Date().toISOString(),
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 503);
      }
    });

    // Liveness check — succeeds whenever the process can serve a request.
    this.app.get('/live', async (c) => {
      return c.json({
        service: 'data-catalog',
        alive: true,
        timestamp: new Date().toISOString()
      });
    });
  }

  /** Probes the database. Currently simulated with a short delay. */
  private async checkDatabase(): Promise<{ name: string; status: string; responseTime?: number }> {
    const start = Date.now();
    try {
      // Simulate database check — in a real implementation this would
      // ping the actual database.
      await new Promise(resolve => setTimeout(resolve, 10));
      return {
        name: 'database',
        status: 'healthy',
        responseTime: Date.now() - start
      };
    } catch (error) {
      return {
        name: 'database',
        status: 'unhealthy',
        responseTime: Date.now() - start
      };
    }
  }

  /** Probes the search index. Currently simulated with a short delay. */
  private async checkSearchService(): Promise<{ name: string; status: string; responseTime?: number }> {
    const start = Date.now();
    try {
      // Simulate search service check — in a real implementation this
      // would check search index health.
      await new Promise(resolve => setTimeout(resolve, 5));
      return {
        name: 'search',
        status: 'healthy',
        responseTime: Date.now() - start
      };
    } catch (error) {
      return {
        name: 'search',
        status: 'unhealthy',
        responseTime: Date.now() - start
      };
    }
  }

  /** Probes the message broker. Currently simulated with a short delay. */
  private async checkEventBus(): Promise<{ name: string; status: string; responseTime?: number }> {
    const start = Date.now();
    try {
      // Simulate event bus check — in a real implementation this would
      // check message broker connectivity.
      await new Promise(resolve => setTimeout(resolve, 3));
      return {
        name: 'eventBus',
        status: 'healthy',
        responseTime: Date.now() - start
      };
    } catch (error) {
      return {
        name: 'eventBus',
        status: 'unhealthy',
        responseTime: Date.now() - start
      };
    }
  }

  /** Returns the Hono sub-app so the root router can mount it. */
  public getApp(): Hono {
    return this.app;
  }
}

View file

@ -1,211 +0,0 @@
import { Hono } from 'hono';
import { DataLineageService } from '../services/DataLineageService';
import { CreateLineageRequest, LineageQuery, ImpactAnalysisQuery } from '../types/DataCatalog';
/**
 * HTTP API for data lineage: relationship CRUD, upstream/downstream
 * traversal, impact analysis, graph export, cycle detection and stats.
 * Every endpoint responds with a { success, data | error } envelope and
 * maps unexpected failures to HTTP 500.
 */
export class LineageController {
  private app: Hono;
  private lineageService: DataLineageService;

  constructor() {
    this.app = new Hono();
    this.lineageService = new DataLineageService();
    this.setupRoutes();
  }

  /**
   * Parses a `depth` query parameter defensively. Returns `fallback` when
   * the value is absent, non-numeric, zero or negative. The previous code
   * applied `parseInt` behind a truthiness check on the raw string, so
   * `?depth=abc` forwarded NaN to the lineage service on the
   * upstream/downstream routes.
   */
  private parseDepth(raw: string | undefined, fallback: number): number {
    if (raw === undefined) {
      return fallback;
    }
    const parsed = parseInt(raw, 10);
    return Number.isInteger(parsed) && parsed > 0 ? parsed : fallback;
  }

  /** Registers all lineage routes on the internal Hono sub-app. */
  private setupRoutes() {
    // Create lineage relationship
    this.app.post('/', async (c) => {
      try {
        const request: CreateLineageRequest = await c.req.json();
        const lineage = await this.lineageService.createLineage(request);
        return c.json({
          success: true,
          data: lineage
        });
      } catch (error) {
        console.error('Error creating lineage:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Get lineage for asset (direction defaults to 'both', depth to 10)
    this.app.get('/assets/:assetId', async (c) => {
      try {
        const assetId = c.req.param('assetId');
        const direction = c.req.query('direction') as 'upstream' | 'downstream' | 'both';
        const depth = this.parseDepth(c.req.query('depth'), 10);
        const lineage = await this.lineageService.getAssetLineage(assetId, {
          direction: direction || 'both',
          depth
        });
        return c.json({
          success: true,
          data: lineage
        });
      } catch (error) {
        console.error('Error getting asset lineage:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Get upstream dependencies (depth defaults to 5)
    this.app.get('/assets/:assetId/upstream', async (c) => {
      try {
        const assetId = c.req.param('assetId');
        const depth = this.parseDepth(c.req.query('depth'), 5);
        const upstream = await this.lineageService.getUpstreamDependencies(assetId, depth);
        return c.json({
          success: true,
          data: upstream
        });
      } catch (error) {
        console.error('Error getting upstream dependencies:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Get downstream dependencies (depth defaults to 5)
    this.app.get('/assets/:assetId/downstream', async (c) => {
      try {
        const assetId = c.req.param('assetId');
        const depth = this.parseDepth(c.req.query('depth'), 5);
        const downstream = await this.lineageService.getDownstreamDependencies(assetId, depth);
        return c.json({
          success: true,
          data: downstream
        });
      } catch (error) {
        console.error('Error getting downstream dependencies:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Perform impact analysis
    this.app.post('/impact-analysis', async (c) => {
      try {
        const query: ImpactAnalysisQuery = await c.req.json();
        const analysis = await this.lineageService.performImpactAnalysis(query);
        return c.json({
          success: true,
          data: analysis
        });
      } catch (error) {
        console.error('Error performing impact analysis:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Get lineage graph for a comma-separated set of asset ids
    this.app.get('/graph', async (c) => {
      try {
        const assetIds = c.req.query('assetIds')?.split(',') || [];
        const depth = this.parseDepth(c.req.query('depth'), 3);
        if (assetIds.length === 0) {
          return c.json({
            success: false,
            error: 'Asset IDs are required'
          }, 400);
        }
        const graph = await this.lineageService.getLineageGraph(assetIds, depth);
        return c.json({
          success: true,
          data: graph
        });
      } catch (error) {
        console.error('Error getting lineage graph:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Check for circular dependencies
    this.app.get('/assets/:assetId/circular-check', async (c) => {
      try {
        const assetId = c.req.param('assetId');
        const hasCycles = await this.lineageService.hasCircularDependencies(assetId);
        return c.json({
          success: true,
          data: {
            assetId,
            hasCircularDependencies: hasCycles
          }
        });
      } catch (error) {
        console.error('Error checking circular dependencies:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Delete lineage relationship
    this.app.delete('/:lineageId', async (c) => {
      try {
        const lineageId = c.req.param('lineageId');
        await this.lineageService.deleteLineage(lineageId);
        return c.json({
          success: true,
          message: 'Lineage relationship deleted successfully'
        });
      } catch (error) {
        console.error('Error deleting lineage:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Get lineage statistics
    this.app.get('/stats', async (c) => {
      try {
        const stats = await this.lineageService.getLineageStatistics();
        return c.json({
          success: true,
          data: stats
        });
      } catch (error) {
        console.error('Error getting lineage statistics:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });
  }

  /** Returns the Hono sub-app so the root router can mount it. */
  public getApp(): Hono {
    return this.app;
  }
}

View file

@ -1,321 +0,0 @@
import { Hono } from 'hono';
import { DataQualityService } from '../services/DataQualityService';
import {
QualityAssessmentRequest,
QualityRule,
QualityIssue,
QualityReportRequest
} from '../types/DataCatalog';
/**
 * HTTP API for data quality: assessments, rule management, rule
 * validation, issue tracking, trends, reports and metric summaries.
 * Every endpoint responds with a { success, data | error } envelope and
 * maps unexpected failures to HTTP 500.
 */
export class QualityController {
  private app: Hono;
  private qualityService: DataQualityService;

  constructor() {
    this.app = new Hono();
    this.qualityService = new DataQualityService();
    this.setupRoutes();
  }

  /**
   * Parses a non-negative integer query parameter, returning `fallback`
   * when the value is absent or not a valid non-negative integer. The
   * previous inline parseInt calls forwarded NaN for garbage input such
   * as `?limit=abc`.
   */
  private parseIntQuery(raw: string | undefined, fallback: number): number {
    if (raw === undefined) {
      return fallback;
    }
    const parsed = parseInt(raw, 10);
    return Number.isInteger(parsed) && parsed >= 0 ? parsed : fallback;
  }

  /** Registers all quality routes on the internal Hono sub-app. */
  private setupRoutes() {
    // Assess asset quality
    this.app.post('/assess', async (c) => {
      try {
        const request: QualityAssessmentRequest = await c.req.json();
        const assessment = await this.qualityService.assessQuality(request);
        return c.json({
          success: true,
          data: assessment
        });
      } catch (error) {
        console.error('Error assessing quality:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Get quality assessment for asset
    this.app.get('/assets/:assetId', async (c) => {
      try {
        const assetId = c.req.param('assetId');
        const assessment = await this.qualityService.getQualityAssessment(assetId);
        if (!assessment) {
          return c.json({
            success: false,
            error: 'Quality assessment not found'
          }, 404);
        }
        return c.json({
          success: true,
          data: assessment
        });
      } catch (error) {
        console.error('Error getting quality assessment:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Create quality rule
    this.app.post('/rules', async (c) => {
      try {
        const rule: Omit<QualityRule, 'id' | 'createdAt' | 'updatedAt'> = await c.req.json();
        const createdRule = await this.qualityService.createQualityRule(rule);
        return c.json({
          success: true,
          data: createdRule
        });
      } catch (error) {
        console.error('Error creating quality rule:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Get quality rules
    this.app.get('/rules', async (c) => {
      try {
        const assetType = c.req.query('assetType');
        const dimension = c.req.query('dimension');
        const activeParam = c.req.query('active');

        const filters: any = {};
        if (assetType) filters.assetType = assetType;
        if (dimension) filters.dimension = dimension;
        // BUG FIX: the previous code computed `active === 'true'` up front,
        // producing a boolean that was never undefined — so the filter was
        // applied on every request, silently forcing active=false whenever
        // the ?active= parameter was omitted. Only filter when the caller
        // actually supplied the parameter.
        if (activeParam !== undefined) filters.active = activeParam === 'true';

        const rules = await this.qualityService.getQualityRules(filters);
        return c.json({
          success: true,
          data: rules
        });
      } catch (error) {
        console.error('Error getting quality rules:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Update quality rule
    this.app.put('/rules/:ruleId', async (c) => {
      try {
        const ruleId = c.req.param('ruleId');
        const updates: Partial<QualityRule> = await c.req.json();
        const updatedRule = await this.qualityService.updateQualityRule(ruleId, updates);
        return c.json({
          success: true,
          data: updatedRule
        });
      } catch (error) {
        console.error('Error updating quality rule:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Delete quality rule
    this.app.delete('/rules/:ruleId', async (c) => {
      try {
        const ruleId = c.req.param('ruleId');
        await this.qualityService.deleteQualityRule(ruleId);
        return c.json({
          success: true,
          message: 'Quality rule deleted successfully'
        });
      } catch (error) {
        console.error('Error deleting quality rule:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Validate quality rules for asset — request body is the data sample
    // to validate against the asset's configured rules.
    this.app.post('/validate/:assetId', async (c) => {
      try {
        const assetId = c.req.param('assetId');
        const data = await c.req.json();
        const validationResults = await this.qualityService.validateQualityRules(assetId, data);
        return c.json({
          success: true,
          data: validationResults
        });
      } catch (error) {
        console.error('Error validating quality rules:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Report quality issue
    this.app.post('/issues', async (c) => {
      try {
        const issue: Omit<QualityIssue, 'id' | 'reportedAt' | 'updatedAt'> = await c.req.json();
        const reportedIssue = await this.qualityService.reportQualityIssue(issue);
        return c.json({
          success: true,
          data: reportedIssue
        });
      } catch (error) {
        console.error('Error reporting quality issue:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Get quality issues (filterable, paginated; limit defaults to 100)
    this.app.get('/issues', async (c) => {
      try {
        const assetId = c.req.query('assetId');
        const severity = c.req.query('severity');
        const status = c.req.query('status');
        const dimension = c.req.query('dimension');
        const limit = this.parseIntQuery(c.req.query('limit'), 100);
        const offset = this.parseIntQuery(c.req.query('offset'), 0);

        const filters: any = {};
        if (assetId) filters.assetId = assetId;
        if (severity) filters.severity = severity;
        if (status) filters.status = status;
        if (dimension) filters.dimension = dimension;

        const issues = await this.qualityService.getQualityIssues(filters, { limit, offset });
        return c.json({
          success: true,
          data: issues
        });
      } catch (error) {
        console.error('Error getting quality issues:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Resolve quality issue
    this.app.patch('/issues/:issueId/resolve', async (c) => {
      try {
        const issueId = c.req.param('issueId');
        const { resolution, resolvedBy } = await c.req.json();
        const resolvedIssue = await this.qualityService.resolveQualityIssue(
          issueId,
          resolution,
          resolvedBy
        );
        return c.json({
          success: true,
          data: resolvedIssue
        });
      } catch (error) {
        console.error('Error resolving quality issue:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Get quality trends (timeRange defaults to 30 days)
    this.app.get('/trends', async (c) => {
      try {
        const assetId = c.req.query('assetId');
        const dimension = c.req.query('dimension');
        const timeRange = c.req.query('timeRange') || '30d';
        const trends = await this.qualityService.getQualityTrends(
          assetId,
          dimension,
          timeRange
        );
        return c.json({
          success: true,
          data: trends
        });
      } catch (error) {
        console.error('Error getting quality trends:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Generate quality report
    this.app.post('/reports', async (c) => {
      try {
        const request: QualityReportRequest = await c.req.json();
        const report = await this.qualityService.generateQualityReport(request);
        return c.json({
          success: true,
          data: report
        });
      } catch (error) {
        console.error('Error generating quality report:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });

    // Get quality metrics summary (timeRange defaults to 7 days)
    this.app.get('/metrics/summary', async (c) => {
      try {
        const assetIds = c.req.query('assetIds')?.split(',');
        const timeRange = c.req.query('timeRange') || '7d';
        const summary = await this.qualityService.getQualityMetricsSummary(
          assetIds,
          timeRange
        );
        return c.json({
          success: true,
          data: summary
        });
      } catch (error) {
        console.error('Error getting quality metrics summary:', error);
        return c.json({
          success: false,
          error: error instanceof Error ? error.message : 'Unknown error'
        }, 500);
      }
    });
  }

  /** Returns the Hono sub-app so the root router can mount it. */
  public getApp(): Hono {
    return this.app;
  }
}

View file

@ -1,334 +0,0 @@
import { Context } from 'hono';
import { Logger } from '@stock-bot/utils';
import { SearchService } from '../services/SearchService';
import { SearchQuery, SearchFilters } from '../types/DataCatalog';
/**
 * Handler-style controller for data discovery: full-text and faceted
 * search, suggestions, similarity, popular/recent queries, analytics,
 * export and reindexing. Unlike the other controllers in this service,
 * each public method here is an individual Hono route handler taking a
 * Context; the service and logger are injected.
 */
export class SearchController {
  constructor(
    private searchService: SearchService,
    private logger: Logger
  ) {}

  /**
   * Parses a non-negative integer query value, returning `fallback` when
   * the value is absent or invalid (previously garbage such as
   * `?limit=abc` produced NaN).
   */
  private static toInt(raw: string | undefined, fallback: number): number {
    if (raw === undefined) {
      return fallback;
    }
    const parsed = parseInt(raw, 10);
    return Number.isInteger(parsed) && parsed >= 0 ? parsed : fallback;
  }

  /** GET search: full-text query with optional filters, sorting and paging. */
  async search(c: Context) {
    try {
      const queryParams = c.req.query();
      const searchQuery: SearchQuery = {
        text: queryParams.q || '',
        offset: SearchController.toInt(queryParams.offset, 0),
        limit: SearchController.toInt(queryParams.limit, 20),
        sortBy: queryParams.sortBy,
        sortOrder: queryParams.sortOrder as 'asc' | 'desc',
        userId: queryParams.userId
      };

      // Parse filters — single values arrive as plain strings; normalize
      // everything to arrays before handing off to the service.
      const filters: SearchFilters = {};
      if (queryParams.types) {
        filters.types = Array.isArray(queryParams.types) ? queryParams.types : [queryParams.types];
      }
      if (queryParams.classifications) {
        filters.classifications = Array.isArray(queryParams.classifications) ? queryParams.classifications : [queryParams.classifications];
      }
      if (queryParams.owners) {
        filters.owners = Array.isArray(queryParams.owners) ? queryParams.owners : [queryParams.owners];
      }
      if (queryParams.tags) {
        filters.tags = Array.isArray(queryParams.tags) ? queryParams.tags : [queryParams.tags];
      }
      if (queryParams.createdAfter) {
        filters.createdAfter = new Date(queryParams.createdAfter);
      }
      if (queryParams.createdBefore) {
        filters.createdBefore = new Date(queryParams.createdBefore);
      }
      if (Object.keys(filters).length > 0) {
        searchQuery.filters = filters;
      }

      const result = await this.searchService.search(searchQuery);

      this.logger.info('Search API call completed', {
        query: searchQuery.text,
        resultCount: result.total,
        searchTime: result.searchTime
      });

      return c.json(result);
    } catch (error) {
      this.logger.error('Search API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** GET suggestions for a partial query; requires at least 2 characters. */
  async suggest(c: Context) {
    try {
      const partial = c.req.query('q');
      if (!partial || partial.length < 2) {
        return c.json({ suggestions: [] });
      }
      const suggestions = await this.searchService.suggest(partial);
      return c.json({ suggestions });
    } catch (error) {
      this.logger.error('Suggestion API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** POST a facet map; returns the assets matching all facets. */
  async searchByFacets(c: Context) {
    try {
      const facets = await c.req.json();
      if (!facets || typeof facets !== 'object') {
        return c.json({ error: 'Facets object is required' }, 400);
      }
      const assets = await this.searchService.searchByFacets(facets);
      return c.json({
        assets,
        total: assets.length,
        facets
      });
    } catch (error) {
      this.logger.error('Facet search API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** GET assets similar to the asset in the :id path parameter. */
  async searchSimilar(c: Context) {
    try {
      const assetId = c.req.param('id');
      const limit = SearchController.toInt(c.req.query('limit'), 10);
      if (!assetId) {
        return c.json({ error: 'Asset ID is required' }, 400);
      }
      const similarAssets = await this.searchService.searchSimilar(assetId, limit);
      return c.json({
        assetId,
        similarAssets,
        total: similarAssets.length
      });
    } catch (error) {
      this.logger.error('Similar search API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** GET the most popular search terms. */
  async getPopularSearches(c: Context) {
    try {
      const limit = SearchController.toInt(c.req.query('limit'), 10);
      const popularSearches = await this.searchService.getPopularSearches(limit);
      return c.json({
        searches: popularSearches,
        total: popularSearches.length
      });
    } catch (error) {
      this.logger.error('Popular searches API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** GET a user's recent searches; :userId is required. */
  async getRecentSearches(c: Context) {
    try {
      const userId = c.req.param('userId');
      const limit = SearchController.toInt(c.req.query('limit'), 10);
      if (!userId) {
        return c.json({ error: 'User ID is required' }, 400);
      }
      const recentSearches = await this.searchService.getRecentSearches(userId, limit);
      return c.json({
        userId,
        searches: recentSearches,
        total: recentSearches.length
      });
    } catch (error) {
      this.logger.error('Recent searches API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** Rebuilds the whole search index. */
  async reindexAssets(c: Context) {
    try {
      await this.searchService.reindexAll();
      this.logger.info('Search index rebuilt via API');
      return c.json({ message: 'Search index rebuilt successfully' });
    } catch (error) {
      this.logger.error('Reindex API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** GET raw search analytics for a timeframe (defaults to 'week'). */
  async getSearchAnalytics(c: Context) {
    try {
      const timeframe = c.req.query('timeframe') || 'week';
      const analytics = await this.searchService.getSearchAnalytics(timeframe);
      return c.json({
        timeframe,
        analytics
      });
    } catch (error) {
      this.logger.error('Search analytics API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** POST an advanced-search request body (query, filters, sorting, paging). */
  async advancedSearch(c: Context) {
    try {
      const searchRequest = await c.req.json();
      if (!searchRequest) {
        return c.json({ error: 'Search request is required' }, 400);
      }

      // Build advanced search query
      const searchQuery: SearchQuery = {
        text: searchRequest.query || '',
        offset: searchRequest.offset || 0,
        limit: searchRequest.limit || 20,
        sortBy: searchRequest.sortBy,
        sortOrder: searchRequest.sortOrder,
        userId: searchRequest.userId,
        filters: searchRequest.filters
      };

      const result = await this.searchService.search(searchQuery);

      // If no results and query is complex, retry with just the first
      // term and surface it as a suggestion rather than silently failing.
      if (result.total === 0 && searchQuery.text && searchQuery.text.split(' ').length > 2) {
        const simpleQuery = searchQuery.text.split(' ')[0];
        const simpleResult = await this.searchService.search({
          ...searchQuery,
          text: simpleQuery
        });
        if (simpleResult.total > 0) {
          result.suggestions = [`Try searching for "${simpleQuery}"`];
        }
      }

      this.logger.info('Advanced search API call completed', {
        query: searchQuery.text,
        resultCount: result.total,
        searchTime: result.searchTime
      });

      return c.json(result);
    } catch (error) {
      this.logger.error('Advanced search API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** GET export of search results as JSON or CSV attachment. */
  async exportSearchResults(c: Context) {
    try {
      const queryParams = c.req.query();
      const format = queryParams.format || 'json';
      if (format !== 'json' && format !== 'csv') {
        return c.json({ error: 'Unsupported export format. Use json or csv' }, 400);
      }

      // Perform search with maximum results
      const searchQuery: SearchQuery = {
        text: queryParams.q || '',
        offset: 0,
        limit: 10000, // Large limit for export
        sortBy: queryParams.sortBy,
        sortOrder: queryParams.sortOrder as 'asc' | 'desc'
      };

      const result = await this.searchService.search(searchQuery);

      if (format === 'csv') {
        const csv = this.convertToCSV(result.assets);
        c.header('Content-Type', 'text/csv');
        c.header('Content-Disposition', 'attachment; filename="search-results.csv"');
        return c.text(csv);
      } else {
        c.header('Content-Type', 'application/json');
        c.header('Content-Disposition', 'attachment; filename="search-results.json"');
        return c.json(result);
      }
    } catch (error) {
      this.logger.error('Export search results API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /** GET aggregated search statistics derived from the analytics data. */
  async getSearchStatistics(c: Context) {
    try {
      const timeframe = c.req.query('timeframe') || 'week';
      const analytics = await this.searchService.getSearchAnalytics(timeframe);

      const statistics = {
        searchVolume: analytics.totalSearches,
        uniqueQueries: analytics.uniqueQueries,
        averageResultsPerSearch: Math.round(analytics.averageResults),
        // Guard the division so an empty timeframe reports 0%.
        noResultQueriesPercent: analytics.totalSearches > 0
          ? Math.round((analytics.noResultQueries / analytics.totalSearches) * 100)
          : 0,
        topSearchTerms: analytics.topQueries.slice(0, 5),
        searchTrend: analytics.searchTrend.trend,
        facetUsage: analytics.facetUsage
      };

      return c.json({
        timeframe,
        statistics
      });
    } catch (error) {
      this.logger.error('Search statistics API call failed', { error });
      return c.json({ error: 'Internal server error' }, 500);
    }
  }

  /**
   * Serializes assets to CSV. Every field is escaped per RFC 4180:
   * values containing a comma, quote, CR or LF are wrapped in quotes with
   * internal quotes doubled. The previous version only quoted the
   * name/description/tags columns, so an id, owner or classification
   * containing a comma corrupted the row.
   */
  private convertToCSV(assets: any[]): string {
    if (assets.length === 0) {
      return 'No results found';
    }

    const escape = (value: unknown): string => {
      const text = value === null || value === undefined ? '' : String(value);
      return /[",\r\n]/.test(text) ? `"${text.replace(/"/g, '""')}"` : text;
    };

    const headers = [
      'ID', 'Name', 'Type', 'Description', 'Owner', 'Classification',
      'Tags', 'Created At', 'Updated At', 'Last Accessed'
    ];
    const csvRows = [headers.join(',')];

    for (const asset of assets) {
      const row = [
        escape(asset.id),
        escape(asset.name),
        escape(asset.type),
        escape(asset.description),
        escape(asset.owner),
        escape(asset.classification),
        escape(asset.tags.join('; ')),
        // assumes createdAt/updatedAt/lastAccessed are Date instances, as
        // the original code called .toISOString() on them — TODO confirm
        // against the SearchService result type.
        escape(asset.createdAt.toISOString()),
        escape(asset.updatedAt.toISOString()),
        escape(asset.lastAccessed ? asset.lastAccessed.toISOString() : '')
      ];
      csvRows.push(row.join(','));
    }

    return csvRows.join('\n');
  }
}

View file

@ -1,201 +0,0 @@
import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { logger } from 'hono/logger';
import { prettyJSON } from 'hono/pretty-json';
import { serve } from '@hono/node-server';
// Import controllers
import { DataCatalogController } from './controllers/DataCatalogController';
import { SearchController } from './controllers/SearchController';
import { LineageController } from './controllers/LineageController';
import { QualityController } from './controllers/QualityController';
import { GovernanceController } from './controllers/GovernanceController';
import { HealthController } from './controllers/HealthController';
// Create main application
const app = new Hono();

// Add middleware.
// CORS is restricted to the local development front-ends; credentials are
// allowed so browser sessions can authenticate.
app.use('*', cors({
  origin: ['http://localhost:3000', 'http://localhost:4000', 'http://localhost:5173'],
  allowMethods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'],
  allowHeaders: ['Content-Type', 'Authorization', 'X-Requested-With'],
  credentials: true
}));
app.use('*', logger());
app.use('*', prettyJSON());

// Initialize controllers
const dataCatalogController = new DataCatalogController();
// NOTE(review): the SearchController visible in this service declares a
// constructor taking (searchService, logger) and exposes per-route
// handler methods rather than a getApp() sub-app — this zero-argument
// construction and the searchController.getApp() mount below look
// inconsistent with that class. Confirm which SearchController version
// this entry point targets.
const searchController = new SearchController();
const lineageController = new LineageController();
const qualityController = new QualityController();
const governanceController = new GovernanceController();
const healthController = new HealthController();

// Setup routes — each controller contributes a Hono sub-app mounted
// under its API prefix.
app.route('/api/v1/assets', dataCatalogController.getApp());
app.route('/api/v1/search', searchController.getApp());
app.route('/api/v1/lineage', lineageController.getApp());
app.route('/api/v1/quality', qualityController.getApp());
app.route('/api/v1/governance', governanceController.getApp());
app.route('/health', healthController.getApp());
// Root endpoint — service banner with a map of the mounted route groups.
app.get('/', (c) => {
  return c.json({
    service: 'Data Catalog Service',
    version: '1.0.0',
    description: 'Comprehensive data catalog and governance service for stock-bot data platform',
    endpoints: {
      assets: '/api/v1/assets',
      search: '/api/v1/search',
      lineage: '/api/v1/lineage',
      quality: '/api/v1/quality',
      governance: '/api/v1/governance',
      health: '/health'
    },
    documentation: '/api/v1/docs'
  });
});

// API documentation endpoint — a static, hand-maintained description of
// the REST surface. NOTE(review): this is not generated from the routes;
// it must be kept in sync with the controllers manually.
app.get('/api/v1/docs', (c) => {
  return c.json({
    title: 'Data Catalog Service API',
    version: '1.0.0',
    description: 'RESTful API for data catalog, lineage, quality, and governance operations',
    endpoints: {
      assets: {
        description: 'Data asset management',
        methods: {
          'GET /api/v1/assets': 'List assets with filtering and pagination',
          'POST /api/v1/assets': 'Create new data asset',
          'GET /api/v1/assets/:id': 'Get asset by ID',
          'PUT /api/v1/assets/:id': 'Update asset',
          'DELETE /api/v1/assets/:id': 'Delete asset',
          'GET /api/v1/assets/:id/schema': 'Get asset schema',
          'PUT /api/v1/assets/:id/schema': 'Update asset schema',
          'GET /api/v1/assets/:id/usage': 'Get asset usage analytics',
          'POST /api/v1/assets/:id/usage': 'Record asset usage'
        }
      },
      search: {
        description: 'Data discovery and search',
        methods: {
          'GET /api/v1/search': 'Search assets with full-text and faceted search',
          'GET /api/v1/search/suggest': 'Get search suggestions',
          'GET /api/v1/search/facets': 'Get available search facets',
          'GET /api/v1/search/similar/:id': 'Find similar assets',
          'GET /api/v1/search/trending': 'Get trending searches',
          'POST /api/v1/search/export': 'Export search results'
        }
      },
      lineage: {
        description: 'Data lineage and impact analysis',
        methods: {
          'POST /api/v1/lineage': 'Create lineage relationship',
          'GET /api/v1/lineage/assets/:assetId': 'Get asset lineage',
          'GET /api/v1/lineage/assets/:assetId/upstream': 'Get upstream dependencies',
          'GET /api/v1/lineage/assets/:assetId/downstream': 'Get downstream dependencies',
          'POST /api/v1/lineage/impact-analysis': 'Perform impact analysis',
          'GET /api/v1/lineage/graph': 'Get lineage graph visualization',
          'GET /api/v1/lineage/assets/:assetId/circular-check': 'Check for circular dependencies',
          'DELETE /api/v1/lineage/:lineageId': 'Delete lineage relationship',
          'GET /api/v1/lineage/stats': 'Get lineage statistics'
        }
      },
      quality: {
        description: 'Data quality assessment and monitoring',
        methods: {
          'POST /api/v1/quality/assess': 'Assess data quality',
          'GET /api/v1/quality/assets/:assetId': 'Get quality assessment',
          'POST /api/v1/quality/rules': 'Create quality rule',
          'GET /api/v1/quality/rules': 'Get quality rules',
          'PUT /api/v1/quality/rules/:ruleId': 'Update quality rule',
          'DELETE /api/v1/quality/rules/:ruleId': 'Delete quality rule',
          'POST /api/v1/quality/validate/:assetId': 'Validate quality rules',
          'POST /api/v1/quality/issues': 'Report quality issue',
          'GET /api/v1/quality/issues': 'Get quality issues',
          'PATCH /api/v1/quality/issues/:issueId/resolve': 'Resolve quality issue',
          'GET /api/v1/quality/trends': 'Get quality trends',
          'POST /api/v1/quality/reports': 'Generate quality report',
          'GET /api/v1/quality/metrics/summary': 'Get quality metrics summary'
        }
      },
      governance: {
        description: 'Data governance and compliance',
        methods: {
          'POST /api/v1/governance/policies': 'Create governance policy',
          'GET /api/v1/governance/policies': 'Get governance policies',
          'PUT /api/v1/governance/policies/:policyId': 'Update governance policy',
          'DELETE /api/v1/governance/policies/:policyId': 'Delete governance policy',
          'POST /api/v1/governance/policies/:policyId/apply/:assetId': 'Apply policy to asset',
          'POST /api/v1/governance/compliance/check': 'Check compliance',
          'GET /api/v1/governance/compliance/violations': 'Get compliance violations',
          'POST /api/v1/governance/access/request': 'Request data access',
          'PATCH /api/v1/governance/access/:requestId': 'Review access request',
          'POST /api/v1/governance/access/check': 'Check access authorization',
          'POST /api/v1/governance/privacy/subject-request': 'Handle data subject request',
          'POST /api/v1/governance/privacy/anonymize/:assetId': 'Anonymize asset data',
          'GET /api/v1/governance/audit/logs': 'Get audit logs',
          'POST /api/v1/governance/audit/log': 'Log access event',
          'GET /api/v1/governance/retention/policies': 'Get retention policies',
          'POST /api/v1/governance/retention/apply': 'Apply retention policy',
          'GET /api/v1/governance/metrics': 'Get governance metrics'
        }
      },
      health: {
        description: 'Service health monitoring',
        methods: {
          'GET /health': 'Basic health check',
          'GET /health/detailed': 'Detailed health check with dependencies',
          'GET /health/ready': 'Readiness check',
          'GET /health/live': 'Liveness check'
        }
      }
    }
  });
});
// 404 handler: any unmatched route receives a JSON error that also
// advertises the top-level API surface for discoverability.
app.notFound((c) => {
  const notFoundBody = {
    success: false,
    error: 'Endpoint not found',
    availableEndpoints: [
      '/api/v1/assets',
      '/api/v1/search',
      '/api/v1/lineage',
      '/api/v1/quality',
      '/api/v1/governance',
      '/health'
    ]
  };
  return c.json(notFoundBody, 404);
});
// Central error handler: log the failure server-side and return a generic
// 500. The real error message is only surfaced in development builds.
app.onError((err, c) => {
  console.error('Application error:', err);
  const isDevelopment = process.env.NODE_ENV === 'development';
  const body = {
    success: false,
    error: 'Internal server error',
    message: isDevelopment ? err.message : 'Something went wrong'
  };
  return c.json(body, 500);
});
// Start server.
// Resolve the listen port from the environment, defaulting to 3003.
// A malformed PORT value previously produced NaN and a crash at bind time;
// we now fall back to the default instead.
const parsedPort = parseInt(process.env.PORT || '3003', 10);
const port = Number.isNaN(parsedPort) ? 3003 : parsedPort;
console.log(`🚀 Data Catalog Service starting on port ${port}`);
console.log(`📚 API Documentation available at http://localhost:${port}/api/v1/docs`);
console.log(`❤️ Health endpoint available at http://localhost:${port}/health`);
serve({
  fetch: app.fetch,
  port: port
});
export default app;

View file

@ -1,312 +0,0 @@
import { EventBus } from '@stock-bot/event-bus';
import { Logger } from '@stock-bot/utils';
import {
DataAsset,
CreateDataAssetRequest,
UpdateDataAssetRequest,
DataAssetType,
DataClassification
} from '../types/DataCatalog';
/**
 * Contract for the asset catalog: CRUD, free-text search, and convenience
 * lookups over DataAsset records.
 */
export interface DataCatalogService {
  /** Register a new asset and return the stored record. */
  createAsset(request: CreateDataAssetRequest): Promise<DataAsset>;
  /** Fetch an asset by id, or null when it does not exist. */
  getAsset(id: string): Promise<DataAsset | null>;
  /** Apply a partial update; returns the updated asset or null if absent. */
  updateAsset(id: string, request: UpdateDataAssetRequest): Promise<DataAsset | null>;
  /** Remove an asset; the reference implementation throws if the id is unknown. */
  deleteAsset(id: string): Promise<void>;
  /** List all assets, optionally narrowed by filter key/value pairs. */
  listAssets(filters?: Record<string, any>): Promise<DataAsset[]>;
  /** Free-text search over name/description/tags, with optional extra filters. */
  searchAssets(query: string, filters?: Record<string, any>): Promise<DataAsset[]>;
  /** All assets owned by the given owner. */
  getAssetsByOwner(owner: string): Promise<DataAsset[]>;
  /** All assets of the given type. */
  getAssetsByType(type: DataAssetType): Promise<DataAsset[]>;
  /** All assets with the given classification. */
  getAssetsByClassification(classification: DataClassification): Promise<DataAsset[]>;
  /** All assets tagged with at least one of the given tags. */
  getAssetsByTags(tags: string[]): Promise<DataAsset[]>;
}
/**
 * In-memory implementation of DataCatalogService.
 *
 * Assets are held in a Map keyed by id; every mutation and read emits a
 * corresponding `data.asset.*` event on the shared event bus.
 */
export class DataCatalogServiceImpl implements DataCatalogService {
  /** Asset store keyed by asset id. */
  private assets: Map<string, DataAsset> = new Map();

  constructor(
    private eventBus: EventBus,
    private logger: Logger
  ) {}

  /**
   * Create a new asset with freshly initialized lineage, quality, usage and
   * governance sub-records, store it, and emit `data.asset.created`.
   */
  async createAsset(request: CreateDataAssetRequest): Promise<DataAsset> {
    try {
      const asset: DataAsset = {
        id: this.generateId(),
        name: request.name,
        type: request.type,
        description: request.description,
        owner: request.owner,
        steward: request.steward,
        tags: request.tags || [],
        classification: request.classification,
        schema: request.schema,
        location: request.location,
        metadata: {
          customProperties: {},
          ...request.metadata
        },
        lineage: {
          id: this.generateId(),
          assetId: '',
          upstreamAssets: [],
          downstreamAssets: [],
          transformations: [],
          impact: {
            downstreamAssets: [],
            affectedUsers: [],
            estimatedImpact: 'low',
            impactDescription: '',
            recommendations: []
          },
          createdAt: new Date(),
          updatedAt: new Date()
        },
        quality: {
          id: this.generateId(),
          assetId: '',
          overallScore: 100,
          dimensions: [],
          rules: [],
          issues: [],
          trend: {
            timeframe: 'week',
            dataPoints: [],
            trend: 'stable',
            changeRate: 0
          },
          lastAssessment: new Date()
        },
        usage: {
          id: this.generateId(),
          assetId: '',
          accessCount: 0,
          uniqueUsers: 0,
          lastAccessed: new Date(),
          topUsers: [],
          accessPatterns: [],
          popularQueries: [],
          usageTrend: {
            timeframe: 'week',
            dataPoints: [],
            trend: 'stable',
            changeRate: 0
          }
        },
        governance: request.governance || {
          id: this.generateId(),
          assetId: '',
          policies: [],
          compliance: [],
          retention: {
            retentionPeriod: 365,
            retentionReason: 'Business requirement',
            legalHold: false
          },
          access: {
            defaultAccess: 'none',
            roles: [],
            users: []
          },
          privacy: {
            containsPII: false,
            sensitiveFields: [],
            anonymizationRules: [],
            consentRequired: false,
            dataSubjectRights: []
          },
          audit: []
        },
        createdAt: new Date(),
        updatedAt: new Date()
      };
      // The nested records are built before the asset id exists, so back-fill it.
      asset.lineage.assetId = asset.id;
      asset.quality.assetId = asset.id;
      asset.usage.assetId = asset.id;
      asset.governance.assetId = asset.id;
      this.assets.set(asset.id, asset);
      this.logger.info('Data asset created', { assetId: asset.id, name: asset.name });
      await this.eventBus.emit('data.asset.created', {
        assetId: asset.id,
        asset,
        timestamp: new Date()
      });
      return asset;
    } catch (error) {
      this.logger.error('Failed to create data asset', { request, error });
      throw error;
    }
  }

  /**
   * Look up an asset by id. A successful read counts as an access: it bumps
   * the usage counter, stamps lastAccessed, and emits `data.asset.accessed`.
   */
  async getAsset(id: string): Promise<DataAsset | null> {
    try {
      const asset = this.assets.get(id);
      if (asset) {
        // Update last accessed time and usage bookkeeping on every read.
        asset.lastAccessed = new Date();
        asset.usage.lastAccessed = new Date();
        asset.usage.accessCount++;
        await this.eventBus.emit('data.asset.accessed', {
          assetId: id,
          timestamp: new Date()
        });
      }
      return asset || null;
    } catch (error) {
      this.logger.error('Failed to get data asset', { assetId: id, error });
      throw error;
    }
  }

  /**
   * Patch an existing asset with only the fields present on the request.
   * Returns null when the asset does not exist.
   */
  async updateAsset(id: string, request: UpdateDataAssetRequest): Promise<DataAsset | null> {
    try {
      const asset = this.assets.get(id);
      if (!asset) {
        return null;
      }
      // Update only provided fields; `undefined` means "leave unchanged".
      if (request.name !== undefined) asset.name = request.name;
      if (request.description !== undefined) asset.description = request.description;
      if (request.owner !== undefined) asset.owner = request.owner;
      if (request.steward !== undefined) asset.steward = request.steward;
      if (request.tags !== undefined) asset.tags = request.tags;
      if (request.classification !== undefined) asset.classification = request.classification;
      if (request.schema !== undefined) asset.schema = request.schema;
      if (request.metadata !== undefined) {
        // Metadata is shallow-merged so unrelated custom properties survive.
        asset.metadata = { ...asset.metadata, ...request.metadata };
      }
      asset.updatedAt = new Date();
      this.assets.set(id, asset);
      this.logger.info('Data asset updated', { assetId: id, changes: request });
      await this.eventBus.emit('data.asset.updated', {
        assetId: id,
        asset,
        changes: request,
        timestamp: new Date()
      });
      return asset;
    } catch (error) {
      this.logger.error('Failed to update data asset', { assetId: id, request, error });
      throw error;
    }
  }

  /**
   * Delete an asset and emit `data.asset.deleted`.
   * @throws Error when no asset with the given id exists.
   */
  async deleteAsset(id: string): Promise<void> {
    try {
      const asset = this.assets.get(id);
      if (!asset) {
        throw new Error(`Asset with id ${id} not found`);
      }
      this.assets.delete(id);
      this.logger.info('Data asset deleted', { assetId: id });
      await this.eventBus.emit('data.asset.deleted', {
        assetId: id,
        asset,
        timestamp: new Date()
      });
    } catch (error) {
      this.logger.error('Failed to delete data asset', { assetId: id, error });
      throw error;
    }
  }

  /** List all assets, optionally narrowed via matchesFilters(). */
  async listAssets(filters?: Record<string, any>): Promise<DataAsset[]> {
    try {
      const assets = Array.from(this.assets.values());
      return filters ? assets.filter(asset => this.matchesFilters(asset, filters)) : assets;
    } catch (error) {
      this.logger.error('Failed to list data assets', { filters, error });
      throw error;
    }
  }

  /**
   * Case-insensitive substring search over name, description and tags,
   * optionally narrowed by the same filters listAssets accepts.
   */
  async searchAssets(query: string, filters?: Record<string, any>): Promise<DataAsset[]> {
    try {
      const searchTerm = query.toLowerCase();
      let assets = Array.from(this.assets.values()).filter(asset =>
        asset.name.toLowerCase().includes(searchTerm) ||
        asset.description.toLowerCase().includes(searchTerm) ||
        asset.tags.some(tag => tag.toLowerCase().includes(searchTerm))
      );
      if (filters) {
        // Previously search ignored the 'tags' filter that listAssets honored;
        // both paths now share matchesFilters for consistent behavior.
        assets = assets.filter(asset => this.matchesFilters(asset, filters));
      }
      this.logger.info('Asset search completed', {
        query,
        filters,
        resultCount: assets.length
      });
      return assets;
    } catch (error) {
      this.logger.error('Failed to search data assets', { query, filters, error });
      throw error;
    }
  }

  async getAssetsByOwner(owner: string): Promise<DataAsset[]> {
    return this.listAssets({ owner });
  }

  async getAssetsByType(type: DataAssetType): Promise<DataAsset[]> {
    return this.listAssets({ type });
  }

  async getAssetsByClassification(classification: DataClassification): Promise<DataAsset[]> {
    return this.listAssets({ classification });
  }

  async getAssetsByTags(tags: string[]): Promise<DataAsset[]> {
    return this.listAssets({ tags });
  }

  /**
   * True when the asset satisfies every provided filter. Supported keys:
   * type, owner, classification, tags (scalar or array — any overlap matches).
   * Unknown filter keys are ignored, matching the original behavior.
   */
  private matchesFilters(asset: DataAsset, filters: Record<string, any>): boolean {
    return Object.entries(filters).every(([key, value]) => {
      if (key === 'type') return asset.type === value;
      if (key === 'owner') return asset.owner === value;
      if (key === 'classification') return asset.classification === value;
      if (key === 'tags') {
        return Array.isArray(value)
          ? value.some(tag => asset.tags.includes(tag))
          : asset.tags.includes(value);
      }
      return true;
    });
  }

  /** Create a unique asset id; slice() replaces the deprecated String#substr. */
  private generateId(): string {
    return `asset_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }
}

View file

@ -1,764 +0,0 @@
import { EventBus } from '@stock-bot/event-bus';
import { Logger } from '@stock-bot/utils';
import {
DataGovernance,
GovernancePolicy,
ComplianceCheck,
RetentionPolicy,
AccessControl,
PrivacySettings,
AuditEntry,
DataAsset,
GovernancePolicyType,
ComplianceStatus,
DataClassification
} from '../types/DataCatalog';
/**
 * Contract for data governance: policy lifecycle, per-asset policy
 * application, compliance checks, access control, privacy handling and
 * audit trails.
 */
export interface DataGovernanceService {
  /** Create a policy; id and timestamps are assigned by the service. */
  createPolicy(policy: Omit<GovernancePolicy, 'id' | 'createdAt' | 'updatedAt'>): Promise<GovernancePolicy>;
  /** Partially update a policy; null when the policy does not exist. */
  updatePolicy(policyId: string, updates: Partial<GovernancePolicy>): Promise<GovernancePolicy | null>;
  /** Delete a policy and detach it from every asset. */
  deletePolicy(policyId: string): Promise<void>;
  /** Fetch a policy by id, or null when absent. */
  getPolicy(policyId: string): Promise<GovernancePolicy | null>;
  /** List policies, optionally filtered (type / active / classification). */
  listPolicies(filters?: Record<string, any>): Promise<GovernancePolicy[]>;
  /** Attach a policy to an asset and re-run its compliance checks. */
  applyPolicy(assetId: string, policyId: string): Promise<void>;
  /** Detach a policy from an asset. */
  removePolicy(assetId: string, policyId: string): Promise<void>;
  /** Evaluate all active policies attached to an asset. */
  checkCompliance(assetId: string): Promise<ComplianceCheck[]>;
  /** Replace the asset's retention policy. */
  updateRetentionPolicy(assetId: string, retention: RetentionPolicy): Promise<void>;
  /** Replace the asset's access-control settings. */
  updateAccessControl(assetId: string, access: AccessControl): Promise<void>;
  /** Replace the asset's privacy settings. */
  updatePrivacySettings(assetId: string, privacy: PrivacySettings): Promise<void>;
  /** Record an access event in the asset's audit trail. */
  auditAccess(assetId: string, userId: string, action: string, details?: any): Promise<void>;
  /** Audit entries for an asset, newest first; filterable by user/action/date. */
  getAuditTrail(assetId: string, filters?: Record<string, any>): Promise<AuditEntry[]>;
  /** Build a compliance summary report over the given assets. */
  generateComplianceReport(assetIds: string[]): Promise<any>;
  /** True when the user may perform the action on the asset. */
  validateDataAccess(assetId: string, userId: string, action: string): Promise<boolean>;
  /** Apply the asset's anonymization rules (no-op when no PII is flagged). */
  anonymizeData(assetId: string, options?: any): Promise<void>;
  /** Handle a GDPR-style data-subject request (access/rectification/erasure/portability). */
  handleDataSubjectRequest(assetId: string, request: any): Promise<any>;
}
/**
 * In-memory implementation of DataGovernanceService.
 *
 * Holds policies, per-asset governance state and an asset registry in Maps;
 * every mutation emits a `data.governance.*` event. Asset and governance
 * maps can be shared with a DataCatalogService via setAssets/setGovernance.
 */
export class DataGovernanceServiceImpl implements DataGovernanceService {
  /** Registered governance policies keyed by policy id. */
  private policies: Map<string, GovernancePolicy> = new Map();
  /** Per-asset governance state keyed by asset id. */
  private governance: Map<string, DataGovernance> = new Map();
  /** Asset registry, typically injected from DataCatalogService via setAssets(). */
  private assets: Map<string, DataAsset> = new Map();

  constructor(
    private eventBus: EventBus,
    private logger: Logger
  ) {
    // Seed the built-in PII and financial-compliance policies.
    this.initializeDefaultPolicies();
  }

  /** Create a policy (id/timestamps assigned here) and emit the created event. */
  async createPolicy(policy: Omit<GovernancePolicy, 'id' | 'createdAt' | 'updatedAt'>): Promise<GovernancePolicy> {
    try {
      const fullPolicy: GovernancePolicy = {
        ...policy,
        id: this.generateId(),
        createdAt: new Date(),
        updatedAt: new Date()
      };
      this.policies.set(fullPolicy.id, fullPolicy);
      this.logger.info('Governance policy created', {
        policyId: fullPolicy.id,
        name: fullPolicy.name,
        type: fullPolicy.type
      });
      await this.eventBus.emit('data.governance.policy.created', {
        policy: fullPolicy,
        timestamp: new Date()
      });
      return fullPolicy;
    } catch (error) {
      this.logger.error('Failed to create governance policy', { policy, error });
      throw error;
    }
  }

  /** Shallow-merge updates into a policy; returns null when it does not exist. */
  async updatePolicy(policyId: string, updates: Partial<GovernancePolicy>): Promise<GovernancePolicy | null> {
    try {
      const policy = this.policies.get(policyId);
      if (!policy) {
        return null;
      }
      const updatedPolicy: GovernancePolicy = {
        ...policy,
        ...updates,
        updatedAt: new Date()
      };
      this.policies.set(policyId, updatedPolicy);
      this.logger.info('Governance policy updated', { policyId, changes: updates });
      await this.eventBus.emit('data.governance.policy.updated', {
        policy: updatedPolicy,
        changes: updates,
        timestamp: new Date()
      });
      return updatedPolicy;
    } catch (error) {
      this.logger.error('Failed to update governance policy', { policyId, updates, error });
      throw error;
    }
  }

  /**
   * Delete a policy and strip it from every asset's governance record.
   * @throws Error when the policy id is unknown.
   */
  async deletePolicy(policyId: string): Promise<void> {
    try {
      const policy = this.policies.get(policyId);
      if (!policy) {
        throw new Error(`Policy with id ${policyId} not found`);
      }
      this.policies.delete(policyId);
      // Remove policy from all assets so no asset references a dead policy.
      for (const [assetId, governance] of this.governance) {
        governance.policies = governance.policies.filter(p => p.id !== policyId);
        this.governance.set(assetId, governance);
      }
      this.logger.info('Governance policy deleted', { policyId });
      await this.eventBus.emit('data.governance.policy.deleted', {
        policyId,
        policy,
        timestamp: new Date()
      });
    } catch (error) {
      this.logger.error('Failed to delete governance policy', { policyId, error });
      throw error;
    }
  }

  /** Fetch a policy by id, or null when absent. */
  async getPolicy(policyId: string): Promise<GovernancePolicy | null> {
    try {
      return this.policies.get(policyId) || null;
    } catch (error) {
      this.logger.error('Failed to get governance policy', { policyId, error });
      throw error;
    }
  }

  /** List policies; supports type / active / classification filters. */
  async listPolicies(filters?: Record<string, any>): Promise<GovernancePolicy[]> {
    try {
      let policies = Array.from(this.policies.values());
      if (filters) {
        policies = policies.filter(policy => {
          return Object.entries(filters).every(([key, value]) => {
            if (key === 'type') return policy.type === value;
            if (key === 'active') return policy.active === value;
            if (key === 'classification') return policy.applicableClassifications?.includes(value);
            return true;
          });
        });
      }
      return policies;
    } catch (error) {
      this.logger.error('Failed to list governance policies', { filters, error });
      throw error;
    }
  }

  /**
   * Attach a policy to an asset (idempotent) and re-run compliance checks.
   * @throws Error when the policy id is unknown.
   */
  async applyPolicy(assetId: string, policyId: string): Promise<void> {
    try {
      const policy = this.policies.get(policyId);
      if (!policy) {
        throw new Error(`Policy with id ${policyId} not found`);
      }
      let governance = this.governance.get(assetId);
      if (!governance) {
        governance = this.createEmptyGovernance(assetId);
      }
      // Check if policy is already applied — application is idempotent.
      if (!governance.policies.find(p => p.id === policyId)) {
        governance.policies.push(policy);
        this.governance.set(assetId, governance);
        // Perform compliance check after applying policy
        await this.checkCompliance(assetId);
        this.logger.info('Policy applied to asset', { assetId, policyId });
        await this.eventBus.emit('data.governance.policy.applied', {
          assetId,
          policyId,
          timestamp: new Date()
        });
      }
    } catch (error) {
      this.logger.error('Failed to apply policy to asset', { assetId, policyId, error });
      throw error;
    }
  }

  /**
   * Detach a policy from an asset.
   * @throws Error when the asset has no governance record.
   */
  async removePolicy(assetId: string, policyId: string): Promise<void> {
    try {
      const governance = this.governance.get(assetId);
      if (!governance) {
        throw new Error(`Governance not found for asset ${assetId}`);
      }
      governance.policies = governance.policies.filter(p => p.id !== policyId);
      this.governance.set(assetId, governance);
      this.logger.info('Policy removed from asset', { assetId, policyId });
      await this.eventBus.emit('data.governance.policy.removed', {
        assetId,
        policyId,
        timestamp: new Date()
      });
    } catch (error) {
      this.logger.error('Failed to remove policy from asset', { assetId, policyId, error });
      throw error;
    }
  }

  /**
   * Run every active attached policy against the asset and record the
   * results. Emits a violation event when any check fails. Returns an
   * empty array when either the asset or its governance record is missing.
   */
  async checkCompliance(assetId: string): Promise<ComplianceCheck[]> {
    try {
      const governance = this.governance.get(assetId);
      const asset = this.assets.get(assetId);
      if (!governance || !asset) {
        return [];
      }
      const complianceChecks: ComplianceCheck[] = [];
      for (const policy of governance.policies) {
        if (!policy.active) continue;
        const check = await this.performComplianceCheck(asset, policy);
        complianceChecks.push(check);
      }
      // Update governance with compliance results
      governance.compliance = complianceChecks;
      this.governance.set(assetId, governance);
      // Log compliance issues and notify listeners.
      const failedChecks = complianceChecks.filter(check => check.status === 'failed');
      if (failedChecks.length > 0) {
        this.logger.warn('Compliance violations detected', {
          assetId,
          violationCount: failedChecks.length
        });
        await this.eventBus.emit('data.governance.compliance.violation', {
          assetId,
          violations: failedChecks,
          timestamp: new Date()
        });
      }
      return complianceChecks;
    } catch (error) {
      this.logger.error('Failed to check compliance', { assetId, error });
      throw error;
    }
  }

  /** Replace the asset's retention policy, creating governance state if needed. */
  async updateRetentionPolicy(assetId: string, retention: RetentionPolicy): Promise<void> {
    try {
      let governance = this.governance.get(assetId);
      if (!governance) {
        governance = this.createEmptyGovernance(assetId);
      }
      governance.retention = retention;
      this.governance.set(assetId, governance);
      this.logger.info('Retention policy updated', { assetId, retentionPeriod: retention.retentionPeriod });
      await this.eventBus.emit('data.governance.retention.updated', {
        assetId,
        retention,
        timestamp: new Date()
      });
    } catch (error) {
      this.logger.error('Failed to update retention policy', { assetId, retention, error });
      throw error;
    }
  }

  /** Replace the asset's access-control settings, creating state if needed. */
  async updateAccessControl(assetId: string, access: AccessControl): Promise<void> {
    try {
      let governance = this.governance.get(assetId);
      if (!governance) {
        governance = this.createEmptyGovernance(assetId);
      }
      governance.access = access;
      this.governance.set(assetId, governance);
      this.logger.info('Access control updated', { assetId, defaultAccess: access.defaultAccess });
      await this.eventBus.emit('data.governance.access.updated', {
        assetId,
        access,
        timestamp: new Date()
      });
    } catch (error) {
      this.logger.error('Failed to update access control', { assetId, access, error });
      throw error;
    }
  }

  /** Replace the asset's privacy settings, creating state if needed. */
  async updatePrivacySettings(assetId: string, privacy: PrivacySettings): Promise<void> {
    try {
      let governance = this.governance.get(assetId);
      if (!governance) {
        governance = this.createEmptyGovernance(assetId);
      }
      governance.privacy = privacy;
      this.governance.set(assetId, governance);
      this.logger.info('Privacy settings updated', {
        assetId,
        containsPII: privacy.containsPII,
        consentRequired: privacy.consentRequired
      });
      await this.eventBus.emit('data.governance.privacy.updated', {
        assetId,
        privacy,
        timestamp: new Date()
      });
    } catch (error) {
      this.logger.error('Failed to update privacy settings', { assetId, privacy, error });
      throw error;
    }
  }

  /** Append an access event to the asset's audit trail and emit it. */
  async auditAccess(assetId: string, userId: string, action: string, details?: any): Promise<void> {
    try {
      let governance = this.governance.get(assetId);
      if (!governance) {
        governance = this.createEmptyGovernance(assetId);
      }
      const auditEntry: AuditEntry = {
        id: this.generateId(),
        userId,
        action,
        timestamp: new Date(),
        ipAddress: details?.ipAddress,
        userAgent: details?.userAgent,
        details
      };
      governance.audit.push(auditEntry);
      this.governance.set(assetId, governance);
      this.logger.info('Access audited', { assetId, userId, action });
      await this.eventBus.emit('data.governance.access.audited', {
        assetId,
        auditEntry,
        timestamp: new Date()
      });
    } catch (error) {
      this.logger.error('Failed to audit access', { assetId, userId, action, error });
      throw error;
    }
  }

  /**
   * Audit entries for an asset, newest first. Filters: userId, action,
   * fromDate, toDate (inclusive). Empty array when no governance exists.
   */
  async getAuditTrail(assetId: string, filters?: Record<string, any>): Promise<AuditEntry[]> {
    try {
      const governance = this.governance.get(assetId);
      if (!governance) {
        return [];
      }
      let auditEntries = governance.audit;
      if (filters) {
        auditEntries = auditEntries.filter(entry => {
          return Object.entries(filters).every(([key, value]) => {
            if (key === 'userId') return entry.userId === value;
            if (key === 'action') return entry.action === value;
            if (key === 'fromDate') return entry.timestamp >= new Date(value);
            if (key === 'toDate') return entry.timestamp <= new Date(value);
            return true;
          });
        });
      }
      return auditEntries.sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());
    } catch (error) {
      this.logger.error('Failed to get audit trail', { assetId, filters, error });
      throw error;
    }
  }

  /**
   * Build a compliance report across the given assets: per-asset status,
   * a flat list of violations, and generated recommendations.
   */
  async generateComplianceReport(assetIds: string[]): Promise<any> {
    try {
      const reportData = {
        summary: {
          totalAssets: assetIds.length,
          compliantAssets: 0,
          nonCompliantAssets: 0,
          violationCount: 0,
          reportDate: new Date()
        },
        assetCompliance: [] as any[],
        policyViolations: [] as any[],
        recommendations: [] as string[]
      };
      let totalViolations = 0;
      for (const assetId of assetIds) {
        const governance = this.governance.get(assetId);
        const asset = this.assets.get(assetId);
        if (governance && asset) {
          const complianceChecks = await this.checkCompliance(assetId);
          const violations = complianceChecks.filter(check => check.status === 'failed');
          const isCompliant = violations.length === 0;
          if (isCompliant) {
            reportData.summary.compliantAssets++;
          } else {
            reportData.summary.nonCompliantAssets++;
          }
          totalViolations += violations.length;
          reportData.assetCompliance.push({
            assetId,
            assetName: asset.name,
            classification: asset.classification,
            compliant: isCompliant,
            violationCount: violations.length,
            policiesApplied: governance.policies.length,
            lastChecked: new Date()
          });
          // Add violations to report
          violations.forEach(violation => {
            reportData.policyViolations.push({
              assetId,
              assetName: asset.name,
              policyName: violation.policyName,
              violation: violation.details,
              severity: violation.severity || 'medium',
              checkedAt: violation.checkedAt
            });
          });
        }
      }
      reportData.summary.violationCount = totalViolations;
      // Generate recommendations
      reportData.recommendations = this.generateComplianceRecommendations(reportData);
      this.logger.info('Compliance report generated', {
        totalAssets: assetIds.length,
        compliantAssets: reportData.summary.compliantAssets,
        violationCount: totalViolations
      });
      return reportData;
    } catch (error) {
      this.logger.error('Failed to generate compliance report', { assetIds, error });
      throw error;
    }
  }

  /**
   * Decide whether a user may perform an action on an asset.
   * Fails closed: unknown assets, 'none' default access without an explicit
   * grant, explicit denials, and sensitive classifications without explicit
   * permission all return false. Errors also return false (never throws).
   */
  async validateDataAccess(assetId: string, userId: string, action: string): Promise<boolean> {
    try {
      const governance = this.governance.get(assetId);
      const asset = this.assets.get(assetId);
      if (!governance || !asset) {
        return false;
      }
      // Check default access
      if (governance.access.defaultAccess === 'none') {
        // Must have explicit permission
        const hasUserAccess = governance.access.users.some(user =>
          user.userId === userId && user.permissions.includes(action)
        );
        const hasRoleAccess = governance.access.roles.some(role =>
          role.permissions.includes(action) // Simplified - would check user roles
        );
        return hasUserAccess || hasRoleAccess;
      }
      // Check if explicitly denied
      const isDenied = governance.access.users.some(user =>
        user.userId === userId && user.permissions.includes(`deny:${action}`)
      );
      if (isDenied) {
        return false;
      }
      // Check classification-based access
      if (asset.classification === 'restricted' || asset.classification === 'confidential') {
        // Require explicit permission for sensitive data
        const hasPermission = governance.access.users.some(user =>
          user.userId === userId && user.permissions.includes(action)
        );
        return hasPermission;
      }
      return true; // Default allow for non-sensitive data
    } catch (error) {
      this.logger.error('Failed to validate data access', { assetId, userId, action, error });
      return false;
    }
  }

  /**
   * Apply the asset's anonymization rules. No-op when the asset has no
   * governance record or is not flagged as containing PII.
   */
  async anonymizeData(assetId: string, options?: any): Promise<void> {
    try {
      const governance = this.governance.get(assetId);
      if (!governance || !governance.privacy.containsPII) {
        return;
      }
      // Apply anonymization rules
      for (const rule of governance.privacy.anonymizationRules) {
        await this.applyAnonymizationRule(assetId, rule, options);
      }
      this.logger.info('Data anonymization completed', { assetId });
      await this.eventBus.emit('data.governance.anonymization.completed', {
        assetId,
        options,
        timestamp: new Date()
      });
    } catch (error) {
      this.logger.error('Failed to anonymize data', { assetId, options, error });
      throw error;
    }
  }

  /**
   * Dispatch a data-subject request to the matching handler.
   * @throws Error for unknown assets or unsupported request types.
   */
  async handleDataSubjectRequest(assetId: string, request: any): Promise<any> {
    try {
      const governance = this.governance.get(assetId);
      const asset = this.assets.get(assetId);
      if (!governance || !asset) {
        throw new Error(`Asset or governance not found for ${assetId}`);
      }
      let response: any = {};
      switch (request.type) {
        case 'access':
          response = await this.handleAccessRequest(assetId, request);
          break;
        case 'rectification':
          response = await this.handleRectificationRequest(assetId, request);
          break;
        case 'erasure':
          response = await this.handleErasureRequest(assetId, request);
          break;
        case 'portability':
          response = await this.handlePortabilityRequest(assetId, request);
          break;
        default:
          throw new Error(`Unsupported request type: ${request.type}`);
      }
      this.logger.info('Data subject request handled', { assetId, requestType: request.type });
      await this.eventBus.emit('data.governance.subject.request.handled', {
        assetId,
        request,
        response,
        timestamp: new Date()
      });
      return response;
    } catch (error) {
      this.logger.error('Failed to handle data subject request', { assetId, request, error });
      throw error;
    }
  }

  // Private helper methods

  /** Seed the two built-in policies (PII protection, financial compliance). */
  private initializeDefaultPolicies(): void {
    const defaultPolicies: GovernancePolicy[] = [
      {
        id: 'policy_pii_protection',
        name: 'PII Protection Policy',
        description: 'Ensures proper handling of personally identifiable information',
        type: 'privacy',
        rules: [
          'PII data must be encrypted at rest',
          'PII access must be logged',
          'PII retention must not exceed 7 years'
        ],
        applicableClassifications: ['pii'],
        active: true,
        severity: 'high',
        createdAt: new Date(),
        updatedAt: new Date()
      },
      {
        id: 'policy_financial_compliance',
        name: 'Financial Data Compliance',
        description: 'Compliance with financial regulations',
        type: 'compliance',
        rules: [
          'Financial data must be retained for 7 years',
          'Access to financial data must be role-based',
          'All financial data access must be audited'
        ],
        applicableClassifications: ['financial'],
        active: true,
        severity: 'critical',
        createdAt: new Date(),
        updatedAt: new Date()
      }
    ];
    defaultPolicies.forEach(policy => {
      this.policies.set(policy.id, policy);
    });
  }

  /** Build a default governance record (365-day retention, default-deny access). */
  private createEmptyGovernance(assetId: string): DataGovernance {
    return {
      id: this.generateId(),
      assetId,
      policies: [],
      compliance: [],
      retention: {
        retentionPeriod: 365,
        retentionReason: 'Business requirement',
        legalHold: false
      },
      access: {
        defaultAccess: 'none',
        roles: [],
        users: []
      },
      privacy: {
        containsPII: false,
        sensitiveFields: [],
        anonymizationRules: [],
        consentRequired: false,
        dataSubjectRights: []
      },
      audit: []
    };
  }

  /**
   * Evaluate one policy against an asset.
   * NOTE: mock implementation — the pass/fail result is random (90% pass),
   * not a real policy evaluation. Replace before production use.
   */
  private async performComplianceCheck(asset: DataAsset, policy: GovernancePolicy): Promise<ComplianceCheck> {
    // Mock compliance check implementation
    // In real scenario, this would validate actual compliance
    const isCompliant = Math.random() > 0.1; // 90% compliance rate for demo
    const check: ComplianceCheck = {
      id: this.generateId(),
      policyId: policy.id,
      policyName: policy.name,
      status: isCompliant ? 'passed' : 'failed',
      checkedAt: new Date(),
      details: isCompliant ? 'All policy requirements met' : 'Policy violation detected',
      severity: policy.severity
    };
    if (!isCompliant) {
      check.recommendations = [
        'Review data handling procedures',
        'Update access controls',
        'Implement additional monitoring'
      ];
    }
    return check;
  }

  /** Mock anonymization: only logs the rule being applied. */
  private async applyAnonymizationRule(assetId: string, rule: any, options?: any): Promise<void> {
    // Mock anonymization implementation
    this.logger.info('Applying anonymization rule', { assetId, rule: rule.type });
  }

  /** Mock handler for a data-subject access request. */
  private async handleAccessRequest(assetId: string, request: any): Promise<any> {
    return {
      status: 'completed',
      data: 'Data access provided according to privacy policy',
      timestamp: new Date()
    };
  }

  /** Mock handler for a data-subject rectification request. */
  private async handleRectificationRequest(assetId: string, request: any): Promise<any> {
    return {
      status: 'completed',
      changes: 'Data rectification completed',
      timestamp: new Date()
    };
  }

  /** Mock handler for a data-subject erasure request. */
  private async handleErasureRequest(assetId: string, request: any): Promise<any> {
    return {
      status: 'completed',
      erasure: 'Data erasure completed',
      timestamp: new Date()
    };
  }

  /** Mock handler for a data-subject portability request. */
  private async handlePortabilityRequest(assetId: string, request: any): Promise<any> {
    return {
      status: 'completed',
      export: 'Data export provided',
      timestamp: new Date()
    };
  }

  /** Derive human-readable recommendations from a compliance report. */
  private generateComplianceRecommendations(reportData: any): string[] {
    const recommendations: string[] = [];
    if (reportData.summary.nonCompliantAssets > 0) {
      recommendations.push(`${reportData.summary.nonCompliantAssets} assets require compliance remediation.`);
    }
    if (reportData.summary.violationCount > 10) {
      recommendations.push('High number of policy violations detected. Review governance policies and implementation.');
    }
    const criticalViolations = reportData.policyViolations.filter((v: any) => v.severity === 'critical');
    if (criticalViolations.length > 0) {
      recommendations.push(`${criticalViolations.length} critical violations require immediate attention.`);
    }
    if (recommendations.length === 0) {
      recommendations.push('All assets are compliant with governance policies. Continue monitoring.');
    }
    return recommendations;
  }

  /** Create a unique id; slice() replaces the deprecated String#substr. */
  private generateId(): string {
    return `governance_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  // Method to inject assets (typically from DataCatalogService)
  setAssets(assets: Map<string, DataAsset>): void {
    this.assets = assets;
  }

  // Method to inject governance (typically from DataCatalogService)
  setGovernance(governance: Map<string, DataGovernance>): void {
    this.governance = governance;
  }
}

View file

@ -1,607 +0,0 @@
import { EventBus } from '@stock-bot/event-bus';
import { Logger } from '@stock-bot/utils';
import {
DataLineage,
DataAsset,
LineageTransformation,
ImpactAnalysis,
LineageQuery,
LineageDirection
} from '../types/DataCatalog';
/**
 * Contract for tracking data lineage: upstream/downstream dependencies
 * between assets, transformations along edges, and impact analysis.
 * NOTE(review): several implementations are defined further down the file
 * and are not fully visible in this chunk; member docs are kept minimal.
 */
export interface DataLineageService {
  /** Register (or replace) the lineage record for an asset. */
  addLineage(lineage: DataLineage): Promise<void>;
  /** Fetch the lineage record for an asset, or null when none exists. */
  getLineage(assetId: string): Promise<DataLineage | null>;
  /** Merge partial changes into an existing lineage; null when absent. */
  updateLineage(assetId: string, lineage: Partial<DataLineage>): Promise<DataLineage | null>;
  /** Add an upstream edge (idempotent); keeps the reverse edge in sync. */
  addUpstreamDependency(assetId: string, upstreamAssetId: string, transformation?: LineageTransformation): Promise<void>;
  /** Add a downstream edge (idempotent); keeps the reverse edge in sync. */
  addDownstreamDependency(assetId: string, downstreamAssetId: string, transformation?: LineageTransformation): Promise<void>;
  /** Remove an upstream edge and its mirrored downstream edge. */
  removeUpstreamDependency(assetId: string, upstreamAssetId: string): Promise<void>;
  /** Remove a downstream edge and its mirrored upstream edge. */
  removeDownstreamDependency(assetId: string, downstreamAssetId: string): Promise<void>;
  /** Resolve upstream assets, optionally limited to a traversal depth. */
  getUpstreamAssets(assetId: string, depth?: number): Promise<DataAsset[]>;
  /** Resolve downstream assets, optionally limited to a traversal depth. */
  getDownstreamAssets(assetId: string, depth?: number): Promise<DataAsset[]>;
  /** Produce an ImpactAnalysis for changes to the given asset. */
  analyzeImpact(assetId: string): Promise<ImpactAnalysis>;
  /** Query assets via a structured lineage query. */
  queryLineage(query: LineageQuery): Promise<DataAsset[]>;
  /** Build a graph representation of lineage in the given direction. */
  getLineageGraph(assetId: string, direction: LineageDirection, depth?: number): Promise<any>;
  /** Detect cycles; each result is a chain of asset ids forming a cycle. */
  detectCircularDependencies(): Promise<string[][]>;
}
export class DataLineageServiceImpl implements DataLineageService {
  // Lineage records keyed by asset id.
  private lineages: Map<string, DataLineage> = new Map();
  // Asset registry; NOTE(review): its population path is not visible in this
  // chunk — presumably injected like the sibling services' setAssets(). Confirm.
  private assets: Map<string, DataAsset> = new Map();
  constructor(
    private eventBus: EventBus,
    private logger: Logger
  ) {}
async addLineage(lineage: DataLineage): Promise<void> {
try {
this.lineages.set(lineage.assetId, lineage);
this.logger.info('Data lineage added', {
assetId: lineage.assetId,
upstreamCount: lineage.upstreamAssets.length,
downstreamCount: lineage.downstreamAssets.length
});
await this.eventBus.emit('data.lineage.added', {
assetId: lineage.assetId,
lineage,
timestamp: new Date()
});
} catch (error) {
this.logger.error('Failed to add data lineage', { lineage, error });
throw error;
}
}
async getLineage(assetId: string): Promise<DataLineage | null> {
try {
return this.lineages.get(assetId) || null;
} catch (error) {
this.logger.error('Failed to get data lineage', { assetId, error });
throw error;
}
}
async updateLineage(assetId: string, lineage: Partial<DataLineage>): Promise<DataLineage | null> {
try {
const existingLineage = this.lineages.get(assetId);
if (!existingLineage) {
return null;
}
const updatedLineage: DataLineage = {
...existingLineage,
...lineage,
updatedAt: new Date()
};
this.lineages.set(assetId, updatedLineage);
this.logger.info('Data lineage updated', { assetId, changes: lineage });
await this.eventBus.emit('data.lineage.updated', {
assetId,
lineage: updatedLineage,
changes: lineage,
timestamp: new Date()
});
return updatedLineage;
} catch (error) {
this.logger.error('Failed to update data lineage', { assetId, lineage, error });
throw error;
}
}
/**
 * Adds `upstreamAssetId` as an upstream dependency of `assetId`, creating an
 * empty lineage record if needed, and mirrors the reverse (downstream) edge on
 * the upstream asset's record. A no-op when the edge already exists.
 * Emits 'data.lineage.dependency.added' only when a new edge is created.
 */
async addUpstreamDependency(
  assetId: string,
  upstreamAssetId: string,
  transformation?: LineageTransformation
): Promise<void> {
  try {
    let lineage = this.lineages.get(assetId);
    if (!lineage) {
      lineage = this.createEmptyLineage(assetId);
    }
    // Check if dependency already exists
    if (!lineage.upstreamAssets.includes(upstreamAssetId)) {
      lineage.upstreamAssets.push(upstreamAssetId);
      if (transformation) {
        lineage.transformations.push(transformation);
      }
      lineage.updatedAt = new Date();
      this.lineages.set(assetId, lineage);
      // Update downstream lineage of the upstream asset
      await this.addDownstreamToUpstream(upstreamAssetId, assetId);
      this.logger.info('Upstream dependency added', { assetId, upstreamAssetId });
      await this.eventBus.emit('data.lineage.dependency.added', {
        assetId,
        upstreamAssetId,
        transformation,
        timestamp: new Date()
      });
    }
  } catch (error) {
    this.logger.error('Failed to add upstream dependency', { assetId, upstreamAssetId, error });
    throw error;
  }
}
/**
 * Adds `downstreamAssetId` as a downstream dependency of `assetId` and mirrors
 * the reverse (upstream) edge on the downstream asset's record. Unlike
 * addUpstreamDependency, the transformation is recorded on the DOWNSTREAM
 * asset's lineage (via addUpstreamToDownstream), not on this asset's record.
 * A no-op when the edge already exists.
 */
async addDownstreamDependency(
  assetId: string,
  downstreamAssetId: string,
  transformation?: LineageTransformation
): Promise<void> {
  try {
    let lineage = this.lineages.get(assetId);
    if (!lineage) {
      lineage = this.createEmptyLineage(assetId);
    }
    // Check if dependency already exists
    if (!lineage.downstreamAssets.includes(downstreamAssetId)) {
      lineage.downstreamAssets.push(downstreamAssetId);
      lineage.updatedAt = new Date();
      this.lineages.set(assetId, lineage);
      // Update upstream lineage of the downstream asset
      await this.addUpstreamToDownstream(downstreamAssetId, assetId, transformation);
      this.logger.info('Downstream dependency added', { assetId, downstreamAssetId });
      await this.eventBus.emit('data.lineage.dependency.added', {
        assetId,
        downstreamAssetId,
        transformation,
        timestamp: new Date()
      });
    }
  } catch (error) {
    this.logger.error('Failed to add downstream dependency', { assetId, downstreamAssetId, error });
    throw error;
  }
}
/**
 * Removes an upstream edge from `assetId` and the mirrored downstream edge
 * from the upstream asset's record. Silently does nothing when `assetId` has
 * no lineage. NOTE(review): transformations recorded for this edge are not
 * removed — confirm that is intentional.
 */
async removeUpstreamDependency(assetId: string, upstreamAssetId: string): Promise<void> {
  try {
    const lineage = this.lineages.get(assetId);
    if (lineage) {
      lineage.upstreamAssets = lineage.upstreamAssets.filter(id => id !== upstreamAssetId);
      lineage.updatedAt = new Date();
      this.lineages.set(assetId, lineage);
      // Remove from downstream lineage of upstream asset
      await this.removeDownstreamFromUpstream(upstreamAssetId, assetId);
      this.logger.info('Upstream dependency removed', { assetId, upstreamAssetId });
      await this.eventBus.emit('data.lineage.dependency.removed', {
        assetId,
        upstreamAssetId,
        timestamp: new Date()
      });
    }
  } catch (error) {
    this.logger.error('Failed to remove upstream dependency', { assetId, upstreamAssetId, error });
    throw error;
  }
}
/**
 * Removes a downstream edge from `assetId` and the mirrored upstream edge from
 * the downstream asset's record. Silently does nothing when `assetId` has no
 * lineage record.
 */
async removeDownstreamDependency(assetId: string, downstreamAssetId: string): Promise<void> {
  try {
    const lineage = this.lineages.get(assetId);
    if (lineage) {
      lineage.downstreamAssets = lineage.downstreamAssets.filter(id => id !== downstreamAssetId);
      lineage.updatedAt = new Date();
      this.lineages.set(assetId, lineage);
      // Remove from upstream lineage of downstream asset
      await this.removeUpstreamFromDownstream(downstreamAssetId, assetId);
      this.logger.info('Downstream dependency removed', { assetId, downstreamAssetId });
      await this.eventBus.emit('data.lineage.dependency.removed', {
        assetId,
        downstreamAssetId,
        timestamp: new Date()
      });
    }
  } catch (error) {
    this.logger.error('Failed to remove downstream dependency', { assetId, downstreamAssetId, error });
    throw error;
  }
}
/**
 * Collects the assets upstream of `assetId`, following lineage edges up to
 * `depth` levels away (default one hop). The starting asset is not included.
 */
async getUpstreamAssets(assetId: string, depth: number = 1): Promise<DataAsset[]> {
  try {
    const seen = new Set<string>();
    const collected: DataAsset[] = [];
    await this.traverseUpstream(assetId, depth, seen, collected);
    return collected;
  } catch (error) {
    this.logger.error('Failed to get upstream assets', { assetId, depth, error });
    throw error;
  }
}
/**
 * Collects the assets downstream of `assetId`, following lineage edges up to
 * `depth` levels away (default one hop). The starting asset is not included.
 */
async getDownstreamAssets(assetId: string, depth: number = 1): Promise<DataAsset[]> {
  try {
    const seen = new Set<string>();
    const collected: DataAsset[] = [];
    await this.traverseDownstream(assetId, depth, seen, collected);
    return collected;
  } catch (error) {
    this.logger.error('Failed to get downstream assets', { assetId, depth, error });
    throw error;
  }
}
/**
 * Estimates the blast radius of changing an asset: walks downstream lineage
 * five levels deep, gathers potentially affected users (owners, stewards, and
 * top users from usage analytics), and maps the downstream asset count to an
 * impact tier (>20 critical, >10 high, >5 medium, otherwise low).
 */
async analyzeImpact(assetId: string): Promise<ImpactAnalysis> {
  try {
    const downstreamAssets = await this.getDownstreamAssets(assetId, 5); // Go deep for impact analysis
    const affectedUsers = new Set<string>();
    // Collect all users who might be affected
    for (const asset of downstreamAssets) {
      affectedUsers.add(asset.owner);
      if (asset.steward) {
        affectedUsers.add(asset.steward);
      }
      // Add users from usage analytics
      // NOTE(review): assumes asset.usage and usage.topUsers are always
      // populated — confirm DataAsset guarantees this.
      asset.usage.topUsers.forEach(user => affectedUsers.add(user.userId));
    }
    // Calculate impact level
    let estimatedImpact: 'low' | 'medium' | 'high' | 'critical' = 'low';
    if (downstreamAssets.length > 20) {
      estimatedImpact = 'critical';
    } else if (downstreamAssets.length > 10) {
      estimatedImpact = 'high';
    } else if (downstreamAssets.length > 5) {
      estimatedImpact = 'medium';
    }
    const impact: ImpactAnalysis = {
      downstreamAssets: downstreamAssets.map(asset => asset.id),
      affectedUsers: Array.from(affectedUsers),
      estimatedImpact,
      impactDescription: this.generateImpactDescription(downstreamAssets.length, Array.from(affectedUsers).length),
      recommendations: this.generateRecommendations(estimatedImpact, downstreamAssets.length)
    };
    this.logger.info('Impact analysis completed', {
      assetId,
      impactLevel: estimatedImpact,
      affectedAssets: downstreamAssets.length,
      affectedUsers: affectedUsers.size
    });
    return impact;
  } catch (error) {
    this.logger.error('Failed to analyze impact', { assetId, error });
    throw error;
  }
}
/**
 * Resolves a lineage query: for each requested asset id, gathers upstream
 * and/or downstream assets (per `query.direction`) to `query.depth` levels,
 * and returns the union with duplicates removed (first occurrence wins).
 */
async queryLineage(query: LineageQuery): Promise<DataAsset[]> {
  try {
    // Dedupe with a Map keyed by asset id: O(n) and order-preserving,
    // replacing the previous O(n^2) findIndex-based filter.
    const resultsById = new Map<string, DataAsset>();
    const collect = (assets: DataAsset[]): void => {
      for (const asset of assets) {
        if (!resultsById.has(asset.id)) {
          resultsById.set(asset.id, asset);
        }
      }
    };
    if (query.assetIds) {
      for (const assetId of query.assetIds) {
        if (query.direction === 'upstream' || query.direction === 'both') {
          collect(await this.getUpstreamAssets(assetId, query.depth));
        }
        if (query.direction === 'downstream' || query.direction === 'both') {
          collect(await this.getDownstreamAssets(assetId, query.depth));
        }
      }
    }
    return Array.from(resultsById.values());
  } catch (error) {
    this.logger.error('Failed to query lineage', { query, error });
    throw error;
  }
}
/**
 * Builds a renderable lineage graph around `assetId` in the given direction,
 * up to `depth` levels (default 3). Returns { nodes, edges } where nodes are
 * deduplicated via a Map keyed by asset id.
 */
async getLineageGraph(assetId: string, direction: LineageDirection, depth: number = 3): Promise<any> {
  try {
    const graph = {
      nodes: new Map(),
      edges: []
    };
    const visited = new Set<string>();
    await this.buildLineageGraph(assetId, direction, depth, visited, graph);
    return {
      nodes: Array.from(graph.nodes.values()),
      edges: graph.edges
    };
  } catch (error) {
    this.logger.error('Failed to get lineage graph', { assetId, direction, depth, error });
    throw error;
  }
}
/**
 * Scans every registered lineage record for cycles along downstream edges
 * using DFS with a recursion stack. Returns each detected cycle as the list
 * of asset ids on its path; logs a warning when any cycle is found.
 */
async detectCircularDependencies(): Promise<string[][]> {
  try {
    const cycles: string[][] = [];
    const visited = new Set<string>();
    const recursionStack = new Set<string>();
    for (const assetId of this.lineages.keys()) {
      if (!visited.has(assetId)) {
        const path: string[] = [];
        await this.detectCycleDFS(assetId, visited, recursionStack, path, cycles);
      }
    }
    if (cycles.length > 0) {
      this.logger.warn('Circular dependencies detected', { cycleCount: cycles.length });
    }
    return cycles;
  } catch (error) {
    this.logger.error('Failed to detect circular dependencies', { error });
    throw error;
  }
}
// Private helper methods

/**
 * Builds a fresh lineage record for an asset with no known dependencies:
 * empty edge lists, empty transformations, and a default 'low' impact shell.
 */
private createEmptyLineage(assetId: string): DataLineage {
  return {
    id: this.generateId(),
    assetId,
    upstreamAssets: [],
    downstreamAssets: [],
    transformations: [],
    impact: {
      downstreamAssets: [],
      affectedUsers: [],
      estimatedImpact: 'low',
      impactDescription: '',
      recommendations: []
    },
    createdAt: new Date(),
    updatedAt: new Date()
  };
}
private async addDownstreamToUpstream(upstreamAssetId: string, downstreamAssetId: string): Promise<void> {
let upstreamLineage = this.lineages.get(upstreamAssetId);
if (!upstreamLineage) {
upstreamLineage = this.createEmptyLineage(upstreamAssetId);
}
if (!upstreamLineage.downstreamAssets.includes(downstreamAssetId)) {
upstreamLineage.downstreamAssets.push(downstreamAssetId);
upstreamLineage.updatedAt = new Date();
this.lineages.set(upstreamAssetId, upstreamLineage);
}
}
/**
 * Mirrors a new upstream edge (and its optional transformation) onto the
 * downstream asset's lineage record, creating the record on demand.
 * No-op when the edge already exists.
 */
private async addUpstreamToDownstream(
  downstreamAssetId: string,
  upstreamAssetId: string,
  transformation?: LineageTransformation
): Promise<void> {
  let downstreamLineage = this.lineages.get(downstreamAssetId);
  if (!downstreamLineage) {
    downstreamLineage = this.createEmptyLineage(downstreamAssetId);
  }
  if (!downstreamLineage.upstreamAssets.includes(upstreamAssetId)) {
    downstreamLineage.upstreamAssets.push(upstreamAssetId);
    if (transformation) {
      downstreamLineage.transformations.push(transformation);
    }
    downstreamLineage.updatedAt = new Date();
    this.lineages.set(downstreamAssetId, downstreamLineage);
  }
}
private async removeDownstreamFromUpstream(upstreamAssetId: string, downstreamAssetId: string): Promise<void> {
const upstreamLineage = this.lineages.get(upstreamAssetId);
if (upstreamLineage) {
upstreamLineage.downstreamAssets = upstreamLineage.downstreamAssets.filter(id => id !== downstreamAssetId);
upstreamLineage.updatedAt = new Date();
this.lineages.set(upstreamAssetId, upstreamLineage);
}
}
private async removeUpstreamFromDownstream(downstreamAssetId: string, upstreamAssetId: string): Promise<void> {
const downstreamLineage = this.lineages.get(downstreamAssetId);
if (downstreamLineage) {
downstreamLineage.upstreamAssets = downstreamLineage.upstreamAssets.filter(id => id !== upstreamAssetId);
downstreamLineage.updatedAt = new Date();
this.lineages.set(downstreamAssetId, downstreamLineage);
}
}
/**
 * Depth-limited DFS over upstream edges. Appends each newly reached asset
 * (resolved via the injected asset registry) to `result`; `visited` prevents
 * revisiting nodes across the whole traversal. Ids with no matching asset in
 * the registry are traversed but not added to the result.
 */
private async traverseUpstream(
  assetId: string,
  remainingDepth: number,
  visited: Set<string>,
  result: DataAsset[]
): Promise<void> {
  if (remainingDepth === 0 || visited.has(assetId)) {
    return;
  }
  visited.add(assetId);
  const lineage = this.lineages.get(assetId);
  if (lineage) {
    for (const upstreamId of lineage.upstreamAssets) {
      const asset = this.assets.get(upstreamId);
      if (asset && !result.find(a => a.id === asset.id)) {
        result.push(asset);
      }
      await this.traverseUpstream(upstreamId, remainingDepth - 1, visited, result);
    }
  }
}
/**
 * Depth-limited DFS over downstream edges; mirror image of traverseUpstream.
 * Appends each newly reached asset to `result`, skipping ids with no entry in
 * the asset registry and nodes already in `visited`.
 */
private async traverseDownstream(
  assetId: string,
  remainingDepth: number,
  visited: Set<string>,
  result: DataAsset[]
): Promise<void> {
  if (remainingDepth === 0 || visited.has(assetId)) {
    return;
  }
  visited.add(assetId);
  const lineage = this.lineages.get(assetId);
  if (lineage) {
    for (const downstreamId of lineage.downstreamAssets) {
      const asset = this.assets.get(downstreamId);
      if (asset && !result.find(a => a.id === asset.id)) {
        result.push(asset);
      }
      await this.traverseDownstream(downstreamId, remainingDepth - 1, visited, result);
    }
  }
}
/**
 * Recursive worker for getLineageGraph: adds the current asset as a node
 * (when it exists in the registry), then emits edges and recurses along the
 * requested direction(s) until `remainingDepth` is exhausted. `visited` caps
 * the traversal; `graph.nodes` is a Map so nodes are naturally deduplicated.
 */
private async buildLineageGraph(
  assetId: string,
  direction: LineageDirection,
  remainingDepth: number,
  visited: Set<string>,
  graph: any
): Promise<void> {
  if (remainingDepth === 0 || visited.has(assetId)) {
    return;
  }
  visited.add(assetId);
  const asset = this.assets.get(assetId);
  const lineage = this.lineages.get(assetId);
  if (asset) {
    graph.nodes.set(assetId, {
      id: assetId,
      name: asset.name,
      type: asset.type,
      classification: asset.classification
    });
  }
  if (lineage) {
    if (direction === 'upstream' || direction === 'both') {
      for (const upstreamId of lineage.upstreamAssets) {
        // Edge direction is always source -> target along data flow.
        graph.edges.push({
          source: upstreamId,
          target: assetId,
          type: 'upstream'
        });
        await this.buildLineageGraph(upstreamId, direction, remainingDepth - 1, visited, graph);
      }
    }
    if (direction === 'downstream' || direction === 'both') {
      for (const downstreamId of lineage.downstreamAssets) {
        graph.edges.push({
          source: assetId,
          target: downstreamId,
          type: 'downstream'
        });
        await this.buildLineageGraph(downstreamId, direction, remainingDepth - 1, visited, graph);
      }
    }
  }
}
/**
 * DFS cycle detector over downstream edges. A node on the current recursion
 * stack that is reached again closes a cycle; the cycle is recorded as the
 * path slice starting at that node. NOTE(review): `visited` is never cleared,
 * so cycles whose nodes were fully explored in an earlier traversal may go
 * unreported — confirm this is acceptable for the caller.
 */
private async detectCycleDFS(
  assetId: string,
  visited: Set<string>,
  recursionStack: Set<string>,
  path: string[],
  cycles: string[][]
): Promise<void> {
  visited.add(assetId);
  recursionStack.add(assetId);
  path.push(assetId);
  const lineage = this.lineages.get(assetId);
  if (lineage) {
    for (const downstreamId of lineage.downstreamAssets) {
      if (!visited.has(downstreamId)) {
        await this.detectCycleDFS(downstreamId, visited, recursionStack, path, cycles);
      } else if (recursionStack.has(downstreamId)) {
        // Found a cycle
        const cycleStart = path.indexOf(downstreamId);
        cycles.push(path.slice(cycleStart));
      }
    }
  }
  path.pop();
  recursionStack.delete(assetId);
}
/** Renders a one-line human-readable summary for an impact analysis. */
private generateImpactDescription(assetCount: number, userCount: number): string {
  return assetCount === 0
    ? 'No downstream dependencies identified.'
    : `Changes to this asset may affect ${assetCount} downstream asset(s) and ${userCount} user(s).`;
}
/**
 * Maps an impact tier to a canned list of operational recommendations.
 * (`assetCount` is currently unused but kept for interface stability.)
 */
private generateRecommendations(impact: string, assetCount: number): string[] {
  switch (impact) {
    case 'critical':
      return [
        'Schedule maintenance window',
        'Notify all stakeholders in advance',
        'Prepare rollback plan',
        'Consider phased rollout'
      ];
    case 'high':
      return [
        'Notify affected users',
        'Test changes thoroughly',
        'Monitor downstream systems'
      ];
    case 'medium':
      return [
        'Test with subset of data',
        'Monitor for issues'
      ];
    default:
      return ['Standard testing procedures apply'];
  }
}
/** Produces a reasonably unique lineage id: timestamp plus a random base36 suffix. */
private generateId(): string {
  // String.prototype.substr is deprecated; slice(2, 11) keeps the same 9 chars.
  return `lineage_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
}
// Method to inject assets (typically from DataCatalogService)
// Stores the registry by reference, so later catalog updates are visible here.
setAssets(assets: Map<string, DataAsset>): void {
  this.assets = assets;
}
}

View file

@ -1,734 +0,0 @@
import { EventBus } from '@stock-bot/event-bus';
import { Logger } from '@stock-bot/utils';
import {
DataQuality,
QualityDimension,
QualityRule,
QualityIssue,
QualityTrend,
DataAsset,
QualityAssessmentRequest,
QualityRuleType,
QualitySeverity
} from '../types/DataCatalog';
/**
 * Contract for data-quality management: assessments, rule administration,
 * issue tracking, trend analysis, and aggregate reporting over catalog assets.
 */
export interface DataQualityService {
  /** Runs a quality assessment for an asset and stores the result. */
  assessQuality(assetId: string, request: QualityAssessmentRequest): Promise<DataQuality>;
  /** Returns the stored quality record, or null when none exists. */
  getQuality(assetId: string): Promise<DataQuality | null>;
  /** Shallow-merges changes into an existing quality record. */
  updateQuality(assetId: string, quality: Partial<DataQuality>): Promise<DataQuality | null>;
  /** Attaches a validation rule to an asset. */
  addQualityRule(assetId: string, rule: QualityRule): Promise<void>;
  /** Detaches a validation rule from an asset. */
  removeQualityRule(assetId: string, ruleId: string): Promise<void>;
  /** Executes one rule; false (and a recorded issue) on failure. */
  validateRule(assetId: string, ruleId: string): Promise<boolean>;
  /** Records a quality issue against an asset. */
  reportIssue(assetId: string, issue: Omit<QualityIssue, 'id' | 'detectedAt'>): Promise<void>;
  /** Marks a previously reported issue as resolved. */
  resolveIssue(assetId: string, issueId: string): Promise<void>;
  /** Returns score trend data filtered to the given timeframe. */
  getTrendAnalysis(assetId: string, timeframe: string): Promise<QualityTrend>;
  /** Aggregate quality metrics, optionally filtered by asset attributes. */
  getQualityMetrics(filters?: Record<string, any>): Promise<any>;
  /** Builds a summary report for the given asset ids. */
  generateQualityReport(assetIds: string[]): Promise<any>;
}
export class DataQualityServiceImpl implements DataQualityService {
private qualities: Map<string, DataQuality> = new Map();
private assets: Map<string, DataAsset> = new Map();
constructor(
private eventBus: EventBus,
private logger: Logger
) {}
async assessQuality(assetId: string, request: QualityAssessmentRequest): Promise<DataQuality> {
try {
const asset = this.assets.get(assetId);
if (!asset) {
throw new Error(`Asset with id ${assetId} not found`);
}
let quality = this.qualities.get(assetId);
if (!quality) {
quality = this.createEmptyQuality(assetId);
}
// Perform quality assessment based on request
const assessmentResults = await this.performQualityAssessment(asset, request);
// Update quality metrics
quality.dimensions = assessmentResults.dimensions;
quality.overallScore = this.calculateOverallScore(assessmentResults.dimensions);
quality.lastAssessment = new Date();
// Update trend data
this.updateQualityTrend(quality, quality.overallScore);
this.qualities.set(assetId, quality);
this.logger.info('Quality assessment completed', {
assetId,
overallScore: quality.overallScore,
dimensionCount: quality.dimensions.length
});
await this.eventBus.emit('data.quality.assessed', {
assetId,
quality,
request,
timestamp: new Date()
});
return quality;
} catch (error) {
this.logger.error('Failed to assess quality', { assetId, request, error });
throw error;
}
}
async getQuality(assetId: string): Promise<DataQuality | null> {
try {
return this.qualities.get(assetId) || null;
} catch (error) {
this.logger.error('Failed to get quality', { assetId, error });
throw error;
}
}
async updateQuality(assetId: string, quality: Partial<DataQuality>): Promise<DataQuality | null> {
try {
const existingQuality = this.qualities.get(assetId);
if (!existingQuality) {
return null;
}
const updatedQuality: DataQuality = {
...existingQuality,
...quality,
lastAssessment: new Date()
};
this.qualities.set(assetId, updatedQuality);
this.logger.info('Quality updated', { assetId, changes: quality });
await this.eventBus.emit('data.quality.updated', {
assetId,
quality: updatedQuality,
changes: quality,
timestamp: new Date()
});
return updatedQuality;
} catch (error) {
this.logger.error('Failed to update quality', { assetId, quality, error });
throw error;
}
}
async addQualityRule(assetId: string, rule: QualityRule): Promise<void> {
try {
let quality = this.qualities.get(assetId);
if (!quality) {
quality = this.createEmptyQuality(assetId);
}
// Ensure rule has an ID
if (!rule.id) {
rule.id = this.generateId();
}
quality.rules.push(rule);
this.qualities.set(assetId, quality);
this.logger.info('Quality rule added', { assetId, ruleId: rule.id, ruleType: rule.type });
await this.eventBus.emit('data.quality.rule.added', {
assetId,
rule,
timestamp: new Date()
});
} catch (error) {
this.logger.error('Failed to add quality rule', { assetId, rule, error });
throw error;
}
}
async removeQualityRule(assetId: string, ruleId: string): Promise<void> {
try {
const quality = this.qualities.get(assetId);
if (!quality) {
throw new Error(`Quality not found for asset ${assetId}`);
}
quality.rules = quality.rules.filter(rule => rule.id !== ruleId);
this.qualities.set(assetId, quality);
this.logger.info('Quality rule removed', { assetId, ruleId });
await this.eventBus.emit('data.quality.rule.removed', {
assetId,
ruleId,
timestamp: new Date()
});
} catch (error) {
this.logger.error('Failed to remove quality rule', { assetId, ruleId, error });
throw error;
}
}
async validateRule(assetId: string, ruleId: string): Promise<boolean> {
try {
const quality = this.qualities.get(assetId);
const asset = this.assets.get(assetId);
if (!quality || !asset) {
return false;
}
const rule = quality.rules.find(r => r.id === ruleId);
if (!rule) {
return false;
}
const isValid = await this.executeQualityRule(asset, rule);
if (!isValid) {
// Create quality issue
const issue: QualityIssue = {
id: this.generateId(),
ruleId: rule.id,
type: rule.type,
severity: rule.severity,
message: `Quality rule validation failed: ${rule.description}`,
detectedAt: new Date(),
resolved: false
};
quality.issues.push(issue);
this.qualities.set(assetId, quality);
await this.eventBus.emit('data.quality.issue.detected', {
assetId,
issue,
rule,
timestamp: new Date()
});
}
return isValid;
} catch (error) {
this.logger.error('Failed to validate quality rule', { assetId, ruleId, error });
throw error;
}
}
async reportIssue(assetId: string, issue: Omit<QualityIssue, 'id' | 'detectedAt'>): Promise<void> {
try {
let quality = this.qualities.get(assetId);
if (!quality) {
quality = this.createEmptyQuality(assetId);
}
const fullIssue: QualityIssue = {
...issue,
id: this.generateId(),
detectedAt: new Date()
};
quality.issues.push(fullIssue);
this.qualities.set(assetId, quality);
this.logger.info('Quality issue reported', {
assetId,
issueId: fullIssue.id,
severity: fullIssue.severity
});
await this.eventBus.emit('data.quality.issue.reported', {
assetId,
issue: fullIssue,
timestamp: new Date()
});
} catch (error) {
this.logger.error('Failed to report quality issue', { assetId, issue, error });
throw error;
}
}
async resolveIssue(assetId: string, issueId: string): Promise<void> {
try {
const quality = this.qualities.get(assetId);
if (!quality) {
throw new Error(`Quality not found for asset ${assetId}`);
}
const issue = quality.issues.find(i => i.id === issueId);
if (!issue) {
throw new Error(`Issue ${issueId} not found for asset ${assetId}`);
}
issue.resolved = true;
issue.resolvedAt = new Date();
this.qualities.set(assetId, quality);
this.logger.info('Quality issue resolved', { assetId, issueId });
await this.eventBus.emit('data.quality.issue.resolved', {
assetId,
issue,
timestamp: new Date()
});
} catch (error) {
this.logger.error('Failed to resolve quality issue', { assetId, issueId, error });
throw error;
}
}
async getTrendAnalysis(assetId: string, timeframe: string): Promise<QualityTrend> {
try {
const quality = this.qualities.get(assetId);
if (!quality) {
throw new Error(`Quality not found for asset ${assetId}`);
}
// Filter trend data by timeframe
const filteredTrend = this.filterTrendByTimeframe(quality.trend, timeframe);
// Calculate trend direction and change rate
const trendAnalysis = this.analyzeTrend(filteredTrend.dataPoints);
return {
...filteredTrend,
trend: trendAnalysis.direction,
changeRate: trendAnalysis.changeRate
};
} catch (error) {
this.logger.error('Failed to get trend analysis', { assetId, timeframe, error });
throw error;
}
}
async getQualityMetrics(filters?: Record<string, any>): Promise<any> {
try {
let qualities = Array.from(this.qualities.values());
// Apply filters if provided
if (filters) {
const assets = Array.from(this.assets.values());
const filteredAssets = assets.filter(asset => {
return Object.entries(filters).every(([key, value]) => {
if (key === 'type') return asset.type === value;
if (key === 'owner') return asset.owner === value;
if (key === 'classification') return asset.classification === value;
return true;
});
});
qualities = qualities.filter(quality =>
filteredAssets.some(asset => asset.id === quality.assetId)
);
}
// Calculate aggregate metrics
const metrics = {
totalAssets: qualities.length,
averageQualityScore: this.calculateAverageScore(qualities),
qualityDistribution: this.calculateQualityDistribution(qualities),
topIssues: this.getTopQualityIssues(qualities),
trendSummary: this.getTrendSummary(qualities),
ruleCompliance: this.calculateRuleCompliance(qualities)
};
this.logger.info('Quality metrics calculated', {
totalAssets: metrics.totalAssets,
averageScore: metrics.averageQualityScore
});
return metrics;
} catch (error) {
this.logger.error('Failed to get quality metrics', { filters, error });
throw error;
}
}
async generateQualityReport(assetIds: string[]): Promise<any> {
try {
const reportData = {
summary: {
totalAssets: assetIds.length,
assessmentDate: new Date(),
averageScore: 0,
criticalIssues: 0,
highIssues: 0
},
assetDetails: [] as any[],
recommendations: [] as string[]
};
let totalScore = 0;
let criticalCount = 0;
let highCount = 0;
for (const assetId of assetIds) {
const quality = this.qualities.get(assetId);
const asset = this.assets.get(assetId);
if (quality && asset) {
totalScore += quality.overallScore;
const criticalIssuesCount = quality.issues.filter(i =>
i.severity === 'critical' && !i.resolved
).length;
const highIssuesCount = quality.issues.filter(i =>
i.severity === 'high' && !i.resolved
).length;
criticalCount += criticalIssuesCount;
highCount += highIssuesCount;
reportData.assetDetails.push({
assetId,
assetName: asset.name,
qualityScore: quality.overallScore,
dimensions: quality.dimensions,
openIssues: quality.issues.filter(i => !i.resolved).length,
criticalIssues: criticalIssuesCount,
highIssues: highIssuesCount,
lastAssessment: quality.lastAssessment
});
}
}
reportData.summary.averageScore = Math.round(totalScore / assetIds.length);
reportData.summary.criticalIssues = criticalCount;
reportData.summary.highIssues = highCount;
// Generate recommendations
reportData.recommendations = this.generateQualityRecommendations(reportData);
this.logger.info('Quality report generated', {
assetCount: assetIds.length,
averageScore: reportData.summary.averageScore,
criticalIssues: criticalCount
});
return reportData;
} catch (error) {
this.logger.error('Failed to generate quality report', { assetIds, error });
throw error;
}
}
// Private helper methods
private createEmptyQuality(assetId: string): DataQuality {
return {
id: this.generateId(),
assetId,
overallScore: 100,
dimensions: [],
rules: [],
issues: [],
trend: {
timeframe: 'week',
dataPoints: [],
trend: 'stable',
changeRate: 0
},
lastAssessment: new Date()
};
}
private async performQualityAssessment(
asset: DataAsset,
request: QualityAssessmentRequest
): Promise<{ dimensions: QualityDimension[] }> {
const dimensions: QualityDimension[] = [];
// Completeness assessment
if (request.checkCompleteness) {
const completeness = await this.assessCompleteness(asset);
dimensions.push(completeness);
}
// Accuracy assessment
if (request.checkAccuracy) {
const accuracy = await this.assessAccuracy(asset);
dimensions.push(accuracy);
}
// Consistency assessment
if (request.checkConsistency) {
const consistency = await this.assessConsistency(asset);
dimensions.push(consistency);
}
// Validity assessment
if (request.checkValidity) {
const validity = await this.assessValidity(asset);
dimensions.push(validity);
}
// Timeliness assessment
if (request.checkTimeliness) {
const timeliness = await this.assessTimeliness(asset);
dimensions.push(timeliness);
}
// Uniqueness assessment
if (request.checkUniqueness) {
const uniqueness = await this.assessUniqueness(asset);
dimensions.push(uniqueness);
}
return { dimensions };
}
private async assessCompleteness(asset: DataAsset): Promise<QualityDimension> {
// Mock implementation - in real scenario, this would analyze actual data
const score = Math.floor(Math.random() * 20) + 80; // 80-100
return {
name: 'completeness',
score,
description: 'Measures the degree to which data is complete',
rules: [`No null values in required fields`],
threshold: 95,
lastChecked: new Date()
};
}
private async assessAccuracy(asset: DataAsset): Promise<QualityDimension> {
const score = Math.floor(Math.random() * 15) + 85; // 85-100
return {
name: 'accuracy',
score,
description: 'Measures how well data represents real-world values',
rules: [`Values within expected ranges`, `Format validation`],
threshold: 90,
lastChecked: new Date()
};
}
private async assessConsistency(asset: DataAsset): Promise<QualityDimension> {
const score = Math.floor(Math.random() * 25) + 75; // 75-100
return {
name: 'consistency',
score,
description: 'Measures uniformity of data across datasets',
rules: [`Consistent data types`, `Standardized formats`],
threshold: 85,
lastChecked: new Date()
};
}
private async assessValidity(asset: DataAsset): Promise<QualityDimension> {
const score = Math.floor(Math.random() * 20) + 80; // 80-100
return {
name: 'validity',
score,
description: 'Measures conformity to defined business rules',
rules: [`Business rule compliance`, `Schema validation`],
threshold: 90,
lastChecked: new Date()
};
}
private async assessTimeliness(asset: DataAsset): Promise<QualityDimension> {
const score = Math.floor(Math.random() * 30) + 70; // 70-100
return {
name: 'timeliness',
score,
description: 'Measures how up-to-date the data is',
rules: [`Data refreshed within SLA`, `Timestamp validation`],
threshold: 85,
lastChecked: new Date()
};
}
private async assessUniqueness(asset: DataAsset): Promise<QualityDimension> {
const score = Math.floor(Math.random() * 25) + 75; // 75-100
return {
name: 'uniqueness',
score,
description: 'Measures absence of duplicate records',
rules: [`No duplicate primary keys`, `Unique constraints enforced`],
threshold: 95,
lastChecked: new Date()
};
}
private async executeQualityRule(asset: DataAsset, rule: QualityRule): Promise<boolean> {
// Mock implementation - in real scenario, this would execute the actual rule
// For demo purposes, randomly pass/fail rules
const passRate = rule.severity === 'critical' ? 0.9 : 0.95;
return Math.random() < passRate;
}
private calculateOverallScore(dimensions: QualityDimension[]): number {
if (dimensions.length === 0) return 100;
const totalScore = dimensions.reduce((sum, dim) => sum + dim.score, 0);
return Math.round(totalScore / dimensions.length);
}
private updateQualityTrend(quality: DataQuality, newScore: number): void {
quality.trend.dataPoints.push({
timestamp: new Date(),
value: newScore
});
// Keep only last 30 data points
if (quality.trend.dataPoints.length > 30) {
quality.trend.dataPoints = quality.trend.dataPoints.slice(-30);
}
// Update trend analysis
const trendAnalysis = this.analyzeTrend(quality.trend.dataPoints);
quality.trend.trend = trendAnalysis.direction;
quality.trend.changeRate = trendAnalysis.changeRate;
}
private filterTrendByTimeframe(trend: QualityTrend, timeframe: string): QualityTrend {
const now = new Date();
let cutoffDate: Date;
switch (timeframe) {
case 'day':
cutoffDate = new Date(now.getTime() - 24 * 60 * 60 * 1000);
break;
case 'week':
cutoffDate = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
break;
case 'month':
cutoffDate = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
break;
default:
cutoffDate = new Date(0); // All time
}
const filteredDataPoints = trend.dataPoints.filter(dp => dp.timestamp >= cutoffDate);
return {
...trend,
timeframe,
dataPoints: filteredDataPoints
};
}
private analyzeTrend(dataPoints: { timestamp: Date; value: number }[]): { direction: 'improving' | 'declining' | 'stable'; changeRate: number } {
if (dataPoints.length < 2) {
return { direction: 'stable', changeRate: 0 };
}
const values = dataPoints.map(dp => dp.value);
const firstValue = values[0];
const lastValue = values[values.length - 1];
const changeRate = ((lastValue - firstValue) / firstValue) * 100;
let direction: 'improving' | 'declining' | 'stable';
if (Math.abs(changeRate) < 2) {
direction = 'stable';
} else if (changeRate > 0) {
direction = 'improving';
} else {
direction = 'declining';
}
return { direction, changeRate: Math.round(changeRate * 100) / 100 };
}
private calculateAverageScore(qualities: DataQuality[]): number {
if (qualities.length === 0) return 0;
const totalScore = qualities.reduce((sum, quality) => sum + quality.overallScore, 0);
return Math.round(totalScore / qualities.length);
}
private calculateQualityDistribution(qualities: DataQuality[]): Record<string, number> {
const distribution = { excellent: 0, good: 0, fair: 0, poor: 0 };
qualities.forEach(quality => {
if (quality.overallScore >= 90) distribution.excellent++;
else if (quality.overallScore >= 80) distribution.good++;
else if (quality.overallScore >= 70) distribution.fair++;
else distribution.poor++;
});
return distribution;
}
private getTopQualityIssues(qualities: DataQuality[]): Array<{ type: string; count: number }> {
const issueTypes = new Map<string, number>();
qualities.forEach(quality => {
quality.issues.filter(issue => !issue.resolved).forEach(issue => {
issueTypes.set(issue.type, (issueTypes.get(issue.type) || 0) + 1);
});
});
return Array.from(issueTypes.entries())
.map(([type, count]) => ({ type, count }))
.sort((a, b) => b.count - a.count)
.slice(0, 5);
}
private getTrendSummary(qualities: DataQuality[]): Record<string, number> {
const trends = { improving: 0, declining: 0, stable: 0 };
qualities.forEach(quality => {
trends[quality.trend.trend]++;
});
return trends;
}
private calculateRuleCompliance(qualities: DataQuality[]): number {
let totalRules = 0;
let passedRules = 0;
qualities.forEach(quality => {
totalRules += quality.rules.length;
// Mock compliance calculation
passedRules += Math.floor(quality.rules.length * (quality.overallScore / 100));
});
return totalRules > 0 ? Math.round((passedRules / totalRules) * 100) : 100;
}
private generateQualityRecommendations(reportData: any): string[] {
const recommendations: string[] = [];
if (reportData.summary.averageScore < 80) {
recommendations.push('Overall data quality is below acceptable threshold. Consider implementing comprehensive data quality monitoring.');
}
if (reportData.summary.criticalIssues > 0) {
recommendations.push(`${reportData.summary.criticalIssues} critical quality issues require immediate attention.`);
}
if (reportData.summary.highIssues > 5) {
recommendations.push('High number of quality issues detected. Review data validation processes.');
}
// Asset-specific recommendations
const lowScoreAssets = reportData.assetDetails.filter((asset: any) => asset.qualityScore < 70);
if (lowScoreAssets.length > 0) {
recommendations.push(`${lowScoreAssets.length} assets have quality scores below 70% and need immediate remediation.`);
}
if (recommendations.length === 0) {
recommendations.push('Data quality is within acceptable ranges. Continue monitoring and maintain current practices.');
}
return recommendations;
}
private generateId(): string {
  // Unique-enough id: timestamp plus a 9-character random base-36 suffix.
  // String.prototype.substr is deprecated; slice(2, 11) is the equivalent.
  return `quality_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
}
// Method to inject assets (typically from DataCatalogService)
// Replaces the asset registry backing quality reports; the Map instance is
// supplied (and presumably kept current) by DataCatalogService — confirm.
setAssets(assets: Map<string, DataAsset>): void {
  this.assets = assets;
}
}

View file

@ -1,801 +0,0 @@
import { EventBus } from '@stock-bot/event-bus';
import { Logger } from '@stock-bot/utils';
import {
DataAsset,
SearchQuery,
SearchResult,
SearchFilters,
SearchSuggestion,
DataAssetType,
DataClassification
} from '../types/DataCatalog';
export interface SearchService {
search(query: SearchQuery): Promise<SearchResult>;
suggest(partial: string): Promise<SearchSuggestion[]>;
searchByFacets(facets: Record<string, string[]>): Promise<DataAsset[]>;
searchSimilar(assetId: string, limit?: number): Promise<DataAsset[]>;
getPopularSearches(limit?: number): Promise<string[]>;
getRecentSearches(userId: string, limit?: number): Promise<string[]>;
indexAsset(asset: DataAsset): Promise<void>;
removeFromIndex(assetId: string): Promise<void>;
reindexAll(): Promise<void>;
getSearchAnalytics(timeframe?: string): Promise<any>;
}
export class SearchServiceImpl implements SearchService {
private searchIndex: Map<string, DataAsset> = new Map();
private searchHistory: Array<{ query: string; userId?: string; timestamp: Date; resultCount: number }> = [];
private assets: Map<string, DataAsset> = new Map();
// In-memory inverted index for search
private wordToAssets: Map<string, Set<string>> = new Map();
private tagToAssets: Map<string, Set<string>> = new Map();
private typeToAssets: Map<string, Set<string>> = new Map();
private classificationToAssets: Map<string, Set<string>> = new Map();
private ownerToAssets: Map<string, Set<string>> = new Map();
constructor(
private eventBus: EventBus,
private logger: Logger
) {}
async search(query: SearchQuery): Promise<SearchResult> {
try {
const startTime = Date.now();
let results: DataAsset[] = [];
if (query.text) {
results = await this.performTextSearch(query.text);
} else {
results = Array.from(this.assets.values());
}
// Apply filters
if (query.filters) {
results = this.applyFilters(results, query.filters);
}
// Sort results
results = this.sortResults(results, query.sortBy, query.sortOrder);
// Apply pagination
const total = results.length;
const offset = query.offset || 0;
const limit = query.limit || 20;
const paginatedResults = results.slice(offset, offset + limit);
// Calculate facets
const facets = this.calculateFacets(results);
const searchTime = Date.now() - startTime;
const searchResult: SearchResult = {
assets: paginatedResults,
total,
offset,
limit,
searchTime,
facets,
suggestions: await this.generateSearchSuggestions(query.text || '', results)
};
// Record search in history
this.recordSearch(query.text || '', query.userId, total);
this.logger.info('Search completed', {
query: query.text,
resultCount: total,
searchTime
});
await this.eventBus.emit('data.catalog.search.performed', {
query,
resultCount: total,
searchTime,
timestamp: new Date()
});
return searchResult;
} catch (error) {
this.logger.error('Search failed', { query, error });
throw error;
}
}
async suggest(partial: string): Promise<SearchSuggestion[]> {
try {
const suggestions: SearchSuggestion[] = [];
const normalizedPartial = partial.toLowerCase().trim();
if (normalizedPartial.length < 2) {
return suggestions;
}
// Asset name suggestions
for (const asset of this.assets.values()) {
if (asset.name.toLowerCase().includes(normalizedPartial)) {
suggestions.push({
text: asset.name,
type: 'asset_name',
count: 1,
highlight: this.highlightMatch(asset.name, partial)
});
}
}
// Tag suggestions
const tagCounts = new Map<string, number>();
for (const asset of this.assets.values()) {
for (const tag of asset.tags) {
if (tag.toLowerCase().includes(normalizedPartial)) {
tagCounts.set(tag, (tagCounts.get(tag) || 0) + 1);
}
}
}
for (const [tag, count] of tagCounts) {
suggestions.push({
text: tag,
type: 'tag',
count,
highlight: this.highlightMatch(tag, partial)
});
}
// Owner suggestions
const ownerCounts = new Map<string, number>();
for (const asset of this.assets.values()) {
if (asset.owner.toLowerCase().includes(normalizedPartial)) {
ownerCounts.set(asset.owner, (ownerCounts.get(asset.owner) || 0) + 1);
}
}
for (const [owner, count] of ownerCounts) {
suggestions.push({
text: owner,
type: 'owner',
count,
highlight: this.highlightMatch(owner, partial)
});
}
// Popular search suggestions
const popularSearches = this.getPopularSearchTerms().filter(term =>
term.toLowerCase().includes(normalizedPartial)
);
for (const search of popularSearches.slice(0, 5)) {
suggestions.push({
text: search,
type: 'popular_search',
count: this.getSearchCount(search),
highlight: this.highlightMatch(search, partial)
});
}
// Sort by relevance and count
return suggestions
.sort((a, b) => {
// Prefer exact matches
const aExact = a.text.toLowerCase().startsWith(normalizedPartial) ? 1 : 0;
const bExact = b.text.toLowerCase().startsWith(normalizedPartial) ? 1 : 0;
if (aExact !== bExact) return bExact - aExact;
// Then by count
return b.count - a.count;
})
.slice(0, 10);
} catch (error) {
this.logger.error('Suggestion generation failed', { partial, error });
throw error;
}
}
async searchByFacets(facets: Record<string, string[]>): Promise<DataAsset[]> {
try {
let results: Set<string> = new Set();
let isFirstFacet = true;
for (const [facetType, values] of Object.entries(facets)) {
const facetResults = new Set<string>();
for (const value of values) {
let assetIds: Set<string> | undefined;
switch (facetType) {
case 'type':
assetIds = this.typeToAssets.get(value);
break;
case 'classification':
assetIds = this.classificationToAssets.get(value);
break;
case 'owner':
assetIds = this.ownerToAssets.get(value);
break;
case 'tags':
assetIds = this.tagToAssets.get(value);
break;
}
if (assetIds) {
for (const assetId of assetIds) {
facetResults.add(assetId);
}
}
}
if (isFirstFacet) {
results = facetResults;
isFirstFacet = false;
} else {
// Intersection of results
results = new Set([...results].filter(id => facetResults.has(id)));
}
}
const assets = Array.from(results)
.map(id => this.assets.get(id))
.filter((asset): asset is DataAsset => asset !== undefined);
this.logger.info('Facet search completed', {
facets,
resultCount: assets.length
});
return assets;
} catch (error) {
this.logger.error('Facet search failed', { facets, error });
throw error;
}
}
async searchSimilar(assetId: string, limit: number = 10): Promise<DataAsset[]> {
try {
const targetAsset = this.assets.get(assetId);
if (!targetAsset) {
return [];
}
const similarities: Array<{ asset: DataAsset; score: number }> = [];
for (const asset of this.assets.values()) {
if (asset.id === assetId) continue;
const score = this.calculateSimilarity(targetAsset, asset);
if (score > 0.1) { // Minimum similarity threshold
similarities.push({ asset, score });
}
}
// Sort by similarity score and return top results
const results = similarities
.sort((a, b) => b.score - a.score)
.slice(0, limit)
.map(item => item.asset);
this.logger.info('Similar assets found', {
assetId,
similarCount: results.length
});
return results;
} catch (error) {
this.logger.error('Similar asset search failed', { assetId, error });
throw error;
}
}
async getPopularSearches(limit: number = 10): Promise<string[]> {
try {
const searchCounts = new Map<string, number>();
// Count search frequency
for (const search of this.searchHistory) {
if (search.query) {
searchCounts.set(search.query, (searchCounts.get(search.query) || 0) + 1);
}
}
// Sort by frequency and return top searches
return Array.from(searchCounts.entries())
.sort((a, b) => b[1] - a[1])
.slice(0, limit)
.map(([query]) => query);
} catch (error) {
this.logger.error('Failed to get popular searches', { error });
throw error;
}
}
async getRecentSearches(userId: string, limit: number = 10): Promise<string[]> {
try {
return this.searchHistory
.filter(search => search.userId === userId && search.query)
.sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime())
.slice(0, limit)
.map(search => search.query);
} catch (error) {
this.logger.error('Failed to get recent searches', { userId, error });
throw error;
}
}
async indexAsset(asset: DataAsset): Promise<void> {
try {
// Add to main index
this.searchIndex.set(asset.id, asset);
this.assets.set(asset.id, asset);
// Update inverted indices
this.updateInvertedIndices(asset);
this.logger.debug('Asset indexed', { assetId: asset.id, name: asset.name });
await this.eventBus.emit('data.catalog.asset.indexed', {
assetId: asset.id,
timestamp: new Date()
});
} catch (error) {
this.logger.error('Failed to index asset', { asset, error });
throw error;
}
}
async removeFromIndex(assetId: string): Promise<void> {
try {
const asset = this.searchIndex.get(assetId);
if (!asset) {
return;
}
// Remove from main index
this.searchIndex.delete(assetId);
this.assets.delete(assetId);
// Remove from inverted indices
this.removeFromInvertedIndices(asset);
this.logger.debug('Asset removed from index', { assetId });
await this.eventBus.emit('data.catalog.asset.unindexed', {
assetId,
timestamp: new Date()
});
} catch (error) {
this.logger.error('Failed to remove asset from index', { assetId, error });
throw error;
}
}
async reindexAll(): Promise<void> {
try {
// Clear all indices
this.searchIndex.clear();
this.wordToAssets.clear();
this.tagToAssets.clear();
this.typeToAssets.clear();
this.classificationToAssets.clear();
this.ownerToAssets.clear();
// Reindex all assets
for (const asset of this.assets.values()) {
await this.indexAsset(asset);
}
this.logger.info('Search index rebuilt', { assetCount: this.assets.size });
await this.eventBus.emit('data.catalog.index.rebuilt', {
assetCount: this.assets.size,
timestamp: new Date()
});
} catch (error) {
this.logger.error('Failed to rebuild search index', { error });
throw error;
}
}
async getSearchAnalytics(timeframe: string = 'week'): Promise<any> {
try {
const now = new Date();
let cutoffDate: Date;
switch (timeframe) {
case 'day':
cutoffDate = new Date(now.getTime() - 24 * 60 * 60 * 1000);
break;
case 'week':
cutoffDate = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
break;
case 'month':
cutoffDate = new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
break;
default:
cutoffDate = new Date(0);
}
const recentSearches = this.searchHistory.filter(search => search.timestamp >= cutoffDate);
const analytics = {
totalSearches: recentSearches.length,
uniqueQueries: new Set(recentSearches.map(s => s.query)).size,
averageResults: recentSearches.length > 0 ?
recentSearches.reduce((sum, s) => sum + s.resultCount, 0) / recentSearches.length : 0,
noResultQueries: recentSearches.filter(s => s.resultCount === 0).length,
topQueries: this.getTopQueries(recentSearches, 10),
searchTrend: this.calculateSearchTrend(recentSearches, timeframe),
facetUsage: this.getFacetUsage(recentSearches)
};
return analytics;
} catch (error) {
this.logger.error('Failed to get search analytics', { timeframe, error });
throw error;
}
}
// Private helper methods
private async performTextSearch(text: string): Promise<DataAsset[]> {
const words = this.tokenize(text);
const assetScores = new Map<string, number>();
for (const word of words) {
const assetIds = this.wordToAssets.get(word) || new Set();
for (const assetId of assetIds) {
assetScores.set(assetId, (assetScores.get(assetId) || 0) + 1);
}
}
// Sort by relevance score
const sortedAssetIds = Array.from(assetScores.entries())
.sort((a, b) => b[1] - a[1])
.map(([assetId]) => assetId);
return sortedAssetIds
.map(id => this.assets.get(id))
.filter((asset): asset is DataAsset => asset !== undefined);
}
private applyFilters(assets: DataAsset[], filters: SearchFilters): DataAsset[] {
return assets.filter(asset => {
if (filters.types && filters.types.length > 0) {
if (!filters.types.includes(asset.type)) return false;
}
if (filters.classifications && filters.classifications.length > 0) {
if (!filters.classifications.includes(asset.classification)) return false;
}
if (filters.owners && filters.owners.length > 0) {
if (!filters.owners.includes(asset.owner)) return false;
}
if (filters.tags && filters.tags.length > 0) {
if (!filters.tags.some(tag => asset.tags.includes(tag))) return false;
}
if (filters.createdAfter) {
if (asset.createdAt < filters.createdAfter) return false;
}
if (filters.createdBefore) {
if (asset.createdAt > filters.createdBefore) return false;
}
return true;
});
}
private sortResults(assets: DataAsset[], sortBy?: string, sortOrder?: 'asc' | 'desc'): DataAsset[] {
  // Order results by the requested field; without sortBy the incoming
  // relevance order is preserved. Sorts a copy — Array.prototype.sort is
  // in-place, and the previous version mutated the caller's array.
  if (!sortBy) {
    return assets; // Return as-is (relevance order)
  }
  const order = sortOrder === 'desc' ? -1 : 1;
  return [...assets].sort((a, b) => {
    let comparison = 0;
    switch (sortBy) {
      case 'name':
        comparison = a.name.localeCompare(b.name);
        break;
      case 'createdAt':
        comparison = a.createdAt.getTime() - b.createdAt.getTime();
        break;
      case 'updatedAt':
        comparison = a.updatedAt.getTime() - b.updatedAt.getTime();
        break;
      case 'lastAccessed': {
        // Braced so the lexical declarations are scoped to this case
        // (no-case-declarations); assets never accessed sort as oldest.
        const aAccessed = a.lastAccessed?.getTime() || 0;
        const bAccessed = b.lastAccessed?.getTime() || 0;
        comparison = aAccessed - bAccessed;
        break;
      }
      case 'usage':
        comparison = a.usage.accessCount - b.usage.accessCount;
        break;
      default:
        comparison = 0;
    }
    return comparison * order;
  });
}
private calculateFacets(assets: DataAsset[]): Record<string, Array<{ value: string; count: number }>> {
const facets: Record<string, Map<string, number>> = {
types: new Map(),
classifications: new Map(),
owners: new Map(),
tags: new Map()
};
for (const asset of assets) {
// Type facet
facets.types.set(asset.type, (facets.types.get(asset.type) || 0) + 1);
// Classification facet
facets.classifications.set(asset.classification, (facets.classifications.get(asset.classification) || 0) + 1);
// Owner facet
facets.owners.set(asset.owner, (facets.owners.get(asset.owner) || 0) + 1);
// Tags facet
for (const tag of asset.tags) {
facets.tags.set(tag, (facets.tags.get(tag) || 0) + 1);
}
}
// Convert to required format
const result: Record<string, Array<{ value: string; count: number }>> = {};
for (const [facetName, facetMap] of Object.entries(facets)) {
result[facetName] = Array.from(facetMap.entries())
.map(([value, count]) => ({ value, count }))
.sort((a, b) => b.count - a.count);
}
return result;
}
private async generateSearchSuggestions(query: string, results: DataAsset[]): Promise<string[]> {
if (!query || results.length === 0) {
return [];
}
const suggestions: string[] = [];
// Extract common tags from results
const tagCounts = new Map<string, number>();
for (const asset of results.slice(0, 10)) { // Top 10 results
for (const tag of asset.tags) {
tagCounts.set(tag, (tagCounts.get(tag) || 0) + 1);
}
}
// Add top tags as suggestions
const topTags = Array.from(tagCounts.entries())
.sort((a, b) => b[1] - a[1])
.slice(0, 3)
.map(([tag]) => `${query} ${tag}`);
suggestions.push(...topTags);
return suggestions;
}
private updateInvertedIndices(asset: DataAsset): void {
// Index words from name and description
const words = [
...this.tokenize(asset.name),
...this.tokenize(asset.description)
];
for (const word of words) {
if (!this.wordToAssets.has(word)) {
this.wordToAssets.set(word, new Set());
}
this.wordToAssets.get(word)!.add(asset.id);
}
// Index tags
for (const tag of asset.tags) {
if (!this.tagToAssets.has(tag)) {
this.tagToAssets.set(tag, new Set());
}
this.tagToAssets.get(tag)!.add(asset.id);
}
// Index type
if (!this.typeToAssets.has(asset.type)) {
this.typeToAssets.set(asset.type, new Set());
}
this.typeToAssets.get(asset.type)!.add(asset.id);
// Index classification
if (!this.classificationToAssets.has(asset.classification)) {
this.classificationToAssets.set(asset.classification, new Set());
}
this.classificationToAssets.get(asset.classification)!.add(asset.id);
// Index owner
if (!this.ownerToAssets.has(asset.owner)) {
this.ownerToAssets.set(asset.owner, new Set());
}
this.ownerToAssets.get(asset.owner)!.add(asset.id);
}
private removeFromInvertedIndices(asset: DataAsset): void {
  // Drop every inverted-index entry that points at this asset, deleting
  // index keys whose posting set becomes empty.
  const words = [
    ...this.tokenize(asset.name),
    ...this.tokenize(asset.description)
  ];
  for (const word of words) {
    const assetSet = this.wordToAssets.get(word);
    if (assetSet) {
      assetSet.delete(asset.id);
      if (assetSet.size === 0) {
        this.wordToAssets.delete(word);
      }
    }
  }
  // Remove from other indices
  this.pruneIndexEntries(this.tagToAssets, asset.tags, asset.id);
  this.pruneIndexEntries(this.typeToAssets, [asset.type], asset.id);
  this.pruneIndexEntries(this.classificationToAssets, [asset.classification], asset.id);
  this.pruneIndexEntries(this.ownerToAssets, [asset.owner], asset.id);
}

// Renamed from `removeFromIndex`: this class already declares a public async
// removeFromIndex(assetId) (part of the SearchService interface), and two
// class members with the same name is a TypeScript duplicate-implementation
// error. Removes assetId from each posting set for the given values.
private pruneIndexEntries(index: Map<string, Set<string>>, values: string[], assetId: string): void {
  for (const value of values) {
    const assetSet = index.get(value);
    if (assetSet) {
      assetSet.delete(assetId);
      if (assetSet.size === 0) {
        index.delete(value);
      }
    }
  }
}
private tokenize(text: string): string[] {
  // Lowercase, replace punctuation with spaces, keep words of 3+ characters.
  const normalized = text.toLowerCase().replace(/[^\w\s]/g, ' ');
  const tokens: string[] = [];
  for (const word of normalized.split(/\s+/)) {
    if (word.length > 2) {
      tokens.push(word);
    }
  }
  return tokens;
}
private calculateSimilarity(asset1: DataAsset, asset2: DataAsset): number {
  // Weighted similarity score in [0, 1]: same type 0.3, same classification
  // 0.2, same owner 0.1, plus Jaccard overlap of tag sets scaled to 0.4.
  let score = 0;
  if (asset1.type === asset2.type) {
    score += 0.3;
  }
  if (asset1.classification === asset2.classification) {
    score += 0.2;
  }
  if (asset1.owner === asset2.owner) {
    score += 0.1;
  }
  const left = new Set(asset1.tags);
  const right = new Set(asset2.tags);
  const sharedCount = [...left].filter(tag => right.has(tag)).length;
  const combinedCount = new Set([...left, ...right]).size;
  if (combinedCount > 0) {
    score += (sharedCount / combinedCount) * 0.4;
  }
  return score;
}
private highlightMatch(text: string, query: string): string {
  // Wrap case-insensitive occurrences of the query in <mark> tags.
  // The user-supplied query must be regex-escaped first: characters such as
  // '(', '*' or '?' previously threw SyntaxError or corrupted the pattern.
  const escaped = query.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  const regex = new RegExp(`(${escaped})`, 'gi');
  return text.replace(regex, '<mark>$1</mark>');
}
private recordSearch(query: string, userId?: string, resultCount: number = 0): void {
  // Append the search to the in-memory history, capped at the 1000 most
  // recent entries.
  const entry = { query, userId, timestamp: new Date(), resultCount };
  this.searchHistory.push(entry);
  if (this.searchHistory.length > 1000) {
    this.searchHistory = this.searchHistory.slice(-1000);
  }
}
private getPopularSearchTerms(): string[] {
  // All historical query strings, most frequently used first.
  const counts = new Map<string, number>();
  for (const { query } of this.searchHistory) {
    if (query) {
      counts.set(query, (counts.get(query) ?? 0) + 1);
    }
  }
  return [...counts.entries()]
    .sort((a, b) => b[1] - a[1])
    .map(([query]) => query);
}
private getSearchCount(query: string): number {
  // Number of historical searches using exactly this query string.
  let count = 0;
  for (const search of this.searchHistory) {
    if (search.query === query) {
      count++;
    }
  }
  return count;
}
private getTopQueries(searches: any[], limit: number): Array<{ query: string; count: number }> {
  // Tally query frequency and return the `limit` most common entries.
  const tally = new Map<string, number>();
  for (const { query } of searches) {
    if (query) {
      tally.set(query, (tally.get(query) ?? 0) + 1);
    }
  }
  const ranked = [...tally.entries()].map(([query, count]) => ({ query, count }));
  ranked.sort((a, b) => b.count - a.count);
  return ranked.slice(0, limit);
}
private calculateSearchTrend(searches: any[], timeframe: string): any {
// Group searches by day
const dailyCounts = new Map<string, number>();
for (const search of searches) {
const day = search.timestamp.toISOString().split('T')[0];
dailyCounts.set(day, (dailyCounts.get(day) || 0) + 1);
}
const dataPoints = Array.from(dailyCounts.entries())
.map(([date, count]) => ({ date, count }))
.sort((a, b) => a.date.localeCompare(b.date));
return {
dataPoints,
trend: this.analyzeTrend(dataPoints.map(p => p.count))
};
}
private analyzeTrend(values: number[]): string {
  // Compare the mean of the first half against the second half; a swing of
  // 10% or more counts as a trend, otherwise 'stable'.
  if (values.length < 2) return 'stable';
  const mid = Math.floor(values.length / 2);
  const firstHalf = values.slice(0, mid);
  const secondHalf = values.slice(mid);
  const firstAvg = firstHalf.reduce((sum, val) => sum + val, 0) / firstHalf.length;
  const secondAvg = secondHalf.reduce((sum, val) => sum + val, 0) / secondHalf.length;
  // Guard the division: a flat-zero first half previously produced an
  // Infinity/NaN change rate.
  if (firstAvg === 0) {
    return secondAvg === 0 ? 'stable' : 'increasing';
  }
  const changePercent = ((secondAvg - firstAvg) / firstAvg) * 100;
  if (Math.abs(changePercent) < 10) return 'stable';
  return changePercent > 0 ? 'increasing' : 'decreasing';
}
private getFacetUsage(searches: any[]): Record<string, number> {
  // Mock facet usage tracking
  const total = searches.length;
  return {
    types: Math.floor(total * 0.3),
    classifications: Math.floor(total * 0.2),
    owners: Math.floor(total * 0.1),
    tags: Math.floor(total * 0.4)
  };
}
// Method to inject assets (typically from DataCatalogService)
// Method to inject assets (typically from DataCatalogService)
setAssets(assets: Map<string, DataAsset>): void {
  this.assets = assets;
  // Reindex all assets when assets are updated. reindexAll() is async and
  // was previously fired without a handler, so a failed rebuild surfaced as
  // an unhandled promise rejection; log failures instead.
  this.reindexAll().catch(error => {
    this.logger.error('Failed to rebuild search index after setAssets', { error });
  });
}

View file

@ -1,524 +0,0 @@
// Data Asset Types
/**
 * Catalog entry for a governed data asset (table, stream, model, …).
 * Aggregates identity, physical location, schema, lineage, quality, usage
 * and governance metadata into a single record.
 */
export interface DataAsset {
  id: string; // catalog-unique identifier
  name: string;
  type: DataAssetType;
  description: string;
  owner: string; // accountable owner — presumably a user or team id; confirm
  steward?: string; // optional day-to-day data steward
  tags: string[];
  classification: DataClassification;
  schema?: DataSchema; // absent for schemaless assets (e.g. raw files)
  location: DataLocation; // where the asset physically lives
  metadata: DataAssetMetadata;
  lineage: DataLineage;
  quality: DataQuality;
  usage: DataUsage;
  governance: DataGovernance;
  createdAt: Date;
  updatedAt: Date;
  lastAccessed?: Date; // unset until an access is first recorded
}
export enum DataAssetType {
TABLE = 'table',
VIEW = 'view',
DATASET = 'dataset',
API = 'api',
FILE = 'file',
STREAM = 'stream',
MODEL = 'model',
FEATURE_GROUP = 'feature_group',
PIPELINE = 'pipeline',
REPORT = 'report'
}
export enum DataClassification {
PUBLIC = 'public',
INTERNAL = 'internal',
CONFIDENTIAL = 'confidential',
RESTRICTED = 'restricted',
PII = 'pii',
FINANCIAL = 'financial'
}
export interface DataSchema {
version: string;
fields: DataField[];
primaryKeys?: string[];
foreignKeys?: ForeignKey[];
indexes?: Index[];
}
export interface DataField {
name: string;
type: string;
nullable: boolean;
description?: string;
constraints?: FieldConstraint[];
tags?: string[];
classification?: DataClassification;
}
export interface ForeignKey {
fields: string[];
referencedAsset: string;
referencedFields: string[];
}
export interface Index {
name: string;
fields: string[];
unique: boolean;
type: 'btree' | 'hash' | 'gin' | 'gist';
}
export interface FieldConstraint {
type: 'not_null' | 'unique' | 'check' | 'range' | 'pattern';
value?: any;
description?: string;
}
export interface DataLocation {
type: 'database' | 'file_system' | 'cloud_storage' | 'api' | 'stream';
connection: string;
path: string;
format?: string;
compression?: string;
partitioning?: PartitionInfo;
}
export interface PartitionInfo {
fields: string[];
strategy: 'range' | 'hash' | 'list';
count?: number;
}
export interface DataAssetMetadata {
size?: number;
rowCount?: number;
columnCount?: number;
fileFormat?: string;
encoding?: string;
delimiter?: string;
compression?: string;
checksums?: Record<string, string>;
customProperties?: Record<string, any>;
}
// Data Lineage Types
export interface DataLineage {
id: string;
assetId: string;
upstreamAssets: LineageEdge[];
downstreamAssets: LineageEdge[];
transformations: DataTransformation[];
impact: ImpactAnalysis;
createdAt: Date;
updatedAt: Date;
}
export interface LineageEdge {
sourceAssetId: string;
targetAssetId: string;
relationship: LineageRelationship;
transformations: string[];
confidence: number;
metadata?: Record<string, any>;
}
export enum LineageRelationship {
DERIVED_FROM = 'derived_from',
AGGREGATED_FROM = 'aggregated_from',
JOINED_WITH = 'joined_with',
FILTERED_FROM = 'filtered_from',
TRANSFORMED_FROM = 'transformed_from',
COPIED_FROM = 'copied_from',
ENRICHED_WITH = 'enriched_with'
}
export interface DataTransformation {
id: string;
name: string;
type: TransformationType;
description?: string;
code?: string;
inputFields: string[];
outputFields: string[];
logic: string;
parameters?: Record<string, any>;
}
export enum TransformationType {
FILTER = 'filter',
AGGREGATE = 'aggregate',
JOIN = 'join',
UNION = 'union',
PIVOT = 'pivot',
UNPIVOT = 'unpivot',
SORT = 'sort',
DEDUPLICATE = 'deduplicate',
CALCULATE = 'calculate',
CAST = 'cast',
RENAME = 'rename'
}
export interface ImpactAnalysis {
downstreamAssets: string[];
affectedUsers: string[];
estimatedImpact: 'low' | 'medium' | 'high' | 'critical';
impactDescription: string;
recommendations: string[];
}
// Data Quality Types
export interface DataQuality {
id: string;
assetId: string;
overallScore: number;
dimensions: QualityDimension[];
rules: QualityRule[];
issues: QualityIssue[];
trend: QualityTrend;
lastAssessment: Date;
nextAssessment?: Date;
}
export interface QualityDimension {
name: QualityDimensionType;
score: number;
weight: number;
description: string;
metrics: QualityMetric[];
}
export enum QualityDimensionType {
COMPLETENESS = 'completeness',
ACCURACY = 'accuracy',
CONSISTENCY = 'consistency',
VALIDITY = 'validity',
UNIQUENESS = 'uniqueness',
TIMELINESS = 'timeliness',
INTEGRITY = 'integrity'
}
export interface QualityRule {
id: string;
name: string;
description: string;
dimension: QualityDimensionType;
type: QualityRuleType;
field?: string;
condition: string;
threshold: number;
severity: 'low' | 'medium' | 'high' | 'critical';
enabled: boolean;
}
export enum QualityRuleType {
NULL_CHECK = 'null_check',
RANGE_CHECK = 'range_check',
PATTERN_CHECK = 'pattern_check',
REFERENCE_CHECK = 'reference_check',
DUPLICATE_CHECK = 'duplicate_check',
FRESHNESS_CHECK = 'freshness_check',
CUSTOM = 'custom'
}
export interface QualityMetric {
name: string;
value: number;
unit?: string;
threshold?: number;
status: 'pass' | 'warn' | 'fail';
}
export interface QualityIssue {
id: string;
ruleId: string;
severity: 'low' | 'medium' | 'high' | 'critical';
description: string;
field?: string;
affectedRows?: number;
detectedAt: Date;
status: 'open' | 'acknowledged' | 'resolved' | 'false_positive';
assignee?: string;
resolution?: string;
resolvedAt?: Date;
}
export interface QualityTrend {
timeframe: 'day' | 'week' | 'month';
dataPoints: QualityDataPoint[];
trend: 'improving' | 'stable' | 'degrading';
changeRate: number;
}
export interface QualityDataPoint {
timestamp: Date;
score: number;
dimensionScores: Record<QualityDimensionType, number>;
}
// Data Usage Types
export interface DataUsage {
id: string;
assetId: string;
accessCount: number;
uniqueUsers: number;
lastAccessed: Date;
topUsers: UserUsage[];
accessPatterns: AccessPattern[];
popularQueries: PopularQuery[];
usageTrend: UsageTrend;
}
export interface UserUsage {
userId: string;
userName: string;
accessCount: number;
lastAccessed: Date;
accessType: 'read' | 'write' | 'query' | 'download';
}
export interface AccessPattern {
timeOfDay: number; // Hour 0-23
dayOfWeek: number; // 0-6
frequency: number;
accessType: 'read' | 'write' | 'query' | 'download';
}
export interface PopularQuery {
query: string;
count: number;
avgExecutionTime: number;
lastExecuted: Date;
users: string[];
}
export interface UsageTrend {
timeframe: 'day' | 'week' | 'month';
dataPoints: UsageDataPoint[];
trend: 'increasing' | 'stable' | 'decreasing';
changeRate: number;
}
export interface UsageDataPoint {
timestamp: Date;
accessCount: number;
uniqueUsers: number;
avgResponseTime?: number;
}
// Data Governance Types
export interface DataGovernance {
id: string;
assetId: string;
policies: GovernancePolicy[];
compliance: ComplianceStatus[];
retention: RetentionPolicy;
access: AccessPolicy;
privacy: PrivacySettings;
audit: AuditTrail[];
}
export interface GovernancePolicy {
id: string;
name: string;
type: PolicyType;
description: string;
rules: PolicyRule[];
enforcement: 'advisory' | 'preventive' | 'detective';
status: 'active' | 'inactive' | 'draft';
}
export enum PolicyType {
ACCESS_CONTROL = 'access_control',
DATA_RETENTION = 'data_retention',
DATA_PRIVACY = 'data_privacy',
DATA_QUALITY = 'data_quality',
USAGE_MONITORING = 'usage_monitoring',
COMPLIANCE = 'compliance'
}
export interface PolicyRule {
id: string;
condition: string;
action: string;
parameters?: Record<string, any>;
}
export interface ComplianceStatus {
regulation: 'GDPR' | 'CCPA' | 'SOX' | 'HIPAA' | 'PCI_DSS' | 'CUSTOM';
status: 'compliant' | 'non_compliant' | 'unknown';
lastAssessment: Date;
issues: ComplianceIssue[];
}
export interface ComplianceIssue {
id: string;
description: string;
severity: 'low' | 'medium' | 'high' | 'critical';
requirement: string;
remediation: string;
dueDate?: Date;
}
export interface RetentionPolicy {
retentionPeriod: number; // in days
archiveAfter?: number; // in days
deleteAfter?: number; // in days
retentionReason: string;
legalHold: boolean;
}
export interface AccessPolicy {
defaultAccess: 'none' | 'read' | 'write' | 'admin';
roles: RolePermission[];
users: UserPermission[];
conditions?: AccessCondition[];
}
export interface RolePermission {
role: string;
permissions: Permission[];
conditions?: AccessCondition[];
}
export interface UserPermission {
userId: string;
permissions: Permission[];
conditions?: AccessCondition[];
expiresAt?: Date;
}
export enum Permission {
READ = 'read',
WRITE = 'write',
DELETE = 'delete',
ADMIN = 'admin',
QUERY = 'query',
EXPORT = 'export'
}
export interface AccessCondition {
type: 'time_based' | 'location_based' | 'purpose_based' | 'data_sensitivity';
condition: string;
value: any;
}
export interface PrivacySettings {
containsPII: boolean;
sensitiveFields: string[];
anonymizationRules: AnonymizationRule[];
consentRequired: boolean;
dataSubjectRights: DataSubjectRight[];
}
export interface AnonymizationRule {
field: string;
method: 'mask' | 'hash' | 'encrypt' | 'tokenize' | 'generalize' | 'suppress';
parameters?: Record<string, any>;
}
export interface DataSubjectRight {
type: 'access' | 'rectification' | 'erasure' | 'portability' | 'restriction';
enabled: boolean;
automatedResponse: boolean;
}
export interface AuditTrail {
id: string;
timestamp: Date;
userId: string;
action: string;
resource: string;
details: Record<string, any>;
outcome: 'success' | 'failure';
ipAddress?: string;
userAgent?: string;
}
// Search and Discovery Types
export interface SearchRequest {
query: string;
filters?: SearchFilter[];
facets?: string[];
sortBy?: string;
sortOrder?: 'asc' | 'desc';
limit?: number;
offset?: number;
}
export interface SearchFilter {
field: string;
operator: 'eq' | 'ne' | 'gt' | 'gte' | 'lt' | 'lte' | 'in' | 'contains' | 'startswith' | 'endswith';
value: any;
}
export interface SearchResponse {
total: number;
assets: DataAsset[];
facets: SearchFacet[];
suggestions: string[];
}
export interface SearchFacet {
field: string;
values: FacetValue[];
}
export interface FacetValue {
value: string;
count: number;
}
// API Request/Response Types
export interface CreateDataAssetRequest {
name: string;
type: DataAssetType;
description: string;
owner: string;
steward?: string;
tags?: string[];
classification: DataClassification;
schema?: DataSchema;
location: DataLocation;
metadata?: Partial<DataAssetMetadata>;
governance?: Partial<DataGovernance>;
}
export interface UpdateDataAssetRequest {
name?: string;
description?: string;
owner?: string;
steward?: string;
tags?: string[];
classification?: DataClassification;
schema?: DataSchema;
metadata?: Partial<DataAssetMetadata>;
}
export interface LineageRequest {
assetId: string;
direction: 'upstream' | 'downstream' | 'both';
depth?: number;
includeTransformations?: boolean;
}
export interface QualityAssessmentRequest {
assetId: string;
rules?: string[];
immediate?: boolean;
}
export interface CreateQualityRuleRequest {
name: string;
description: string;
dimension: QualityDimensionType;
type: QualityRuleType;
field?: string;
condition: string;
threshold: number;
severity: 'low' | 'medium' | 'high' | 'critical';
}

View file

@ -1,20 +0,0 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"baseUrl": "./src",
"paths": {
"@/*": ["*"]
}
},
"include": [
"src/**/*"
],
"exclude": [
"node_modules",
"dist",
"**/*.test.ts",
"**/*.spec.ts"
]
}

View file

@ -1,34 +0,0 @@
{
"name": "data-processor",
"version": "1.0.0",
"description": "Data processing and pipeline orchestration service",
  "main": "src/index.ts",
  "scripts": {
"dev": "bun run --watch src/index.ts",
"start": "bun run src/index.ts",
"build": "bun build src/index.ts --outdir=dist --target=bun",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit"
  },
  "dependencies": {
"@stock-bot/types": "*",
"@stock-bot/event-bus": "*",
"@stock-bot/utils": "*",
"@stock-bot/logger": "*",
"@stock-bot/api-client": "*",
"hono": "^4.6.3",
"ioredis": "^5.4.1",
"cron": "^3.1.6",
"bull": "^4.12.2",
"axios": "^1.6.2",
"node-fetch": "^3.3.2",
"csv-parser": "^3.0.0",
"joi": "^17.11.0"
},
"devDependencies": {
"bun-types": "^1.2.15",
"@types/node": "^20.10.5",
"@types/bull": "^4.10.0",
"typescript": "^5.3.3",
"eslint": "^8.56.0"
}
}

View file

@ -1,106 +0,0 @@
import { Context } from 'hono';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('HealthController');
/**
 * HTTP handlers for service health probes.
 *
 * Exposes a basic probe (`getHealth`) and an extended variant
 * (`getDetailedHealth`) that adds process/system details and pipeline metrics.
 * Both return 200 with a "healthy" payload, or 503 with an "unhealthy" one.
 */
export class HealthController {
  /** GET /health — basic status, identity, and dependency checks. */
  async getHealth(c: Context): Promise<Response> {
    try {
      const dependencies = {
        redis: await this.checkRedisHealth(),
        eventBus: await this.checkEventBusHealth(),
      };
      return c.json({ ...this.baseInfo(), dependencies });
    } catch (error) {
      return this.unhealthyResponse(c, error, 'Health check failed:');
    }
  }

  /** GET /health/detailed — base info plus system details and pipeline metrics. */
  async getDetailedHealth(c: Context): Promise<Response> {
    try {
      const payload = {
        ...this.baseInfo(),
        system: {
          platform: process.platform,
          architecture: process.arch,
          nodeVersion: process.version,
          memory: process.memoryUsage(),
          pid: process.pid
        },
        dependencies: {
          redis: await this.checkRedisHealth(),
          eventBus: await this.checkEventBusHealth(),
        },
        metrics: {
          activePipelines: 0, // Will be populated by orchestrator
          runningJobs: 0, // Will be populated by orchestrator
          totalProcessedRecords: 0 // Will be populated by orchestrator
        }
      };
      return c.json(payload);
    } catch (error) {
      return this.unhealthyResponse(c, error, 'Detailed health check failed:');
    }
  }

  /** Fields shared by both health payloads (status, identity, runtime info). */
  private baseInfo() {
    return {
      status: 'healthy',
      timestamp: new Date().toISOString(),
      service: 'data-processor',
      version: process.env.npm_package_version || '1.0.0',
      uptime: process.uptime(),
      environment: process.env.NODE_ENV || 'development',
    };
  }

  /** Logs the failure and renders the shared 503 "unhealthy" body. */
  private unhealthyResponse(c: Context, error: unknown, logMessage: string): Response {
    logger.error(logMessage, error);
    return c.json({
      status: 'unhealthy',
      timestamp: new Date().toISOString(),
      service: 'data-processor',
      error: error instanceof Error ? error.message : 'Unknown error'
    }, 503);
  }

  /**
   * Reports Redis reachability and round-trip latency.
   * NOTE(review): the actual ping is not implemented yet — this currently
   * always reports healthy with ~0ms latency.
   */
  private async checkRedisHealth(): Promise<{ status: string; latency?: number; error?: string }> {
    try {
      const before = Date.now();
      // In a real implementation, ping Redis here
      return { status: 'healthy', latency: Date.now() - before };
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Redis connection failed';
      return { status: 'unhealthy', error: message };
    }
  }

  /**
   * Reports event-bus connectivity.
   * NOTE(review): connection check not implemented — always healthy for now.
   */
  private async checkEventBusHealth(): Promise<{ status: string; error?: string }> {
    try {
      // In a real implementation, check event bus connection here
      return { status: 'healthy' };
    } catch (error) {
      const message = error instanceof Error ? error.message : 'Event bus connection failed';
      return { status: 'unhealthy', error: message };
    }
  }
}

View file

@ -1,299 +0,0 @@
import { Context } from 'hono';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('JobController');
import { DataPipelineOrchestrator } from '../core/DataPipelineOrchestrator';
import { JobStatus } from '../types/DataPipeline';
/**
 * HTTP handlers for inspecting and managing pipeline jobs:
 * list/get/cancel/retry plus per-job logs, metrics, and aggregate stats.
 * All responses use the `{ success, data | error }` envelope.
 */
export class JobController {
  constructor(private orchestrator: DataPipelineOrchestrator) {}

  /**
   * Parses a non-negative integer query parameter with a fallback.
   * Bug fix: `parseInt('abc')` previously produced NaN limits/offsets, so
   * `slice(offset, offset + limit)` returned an empty page and `hasMore`
   * was always false for malformed input. Invalid/negative values now fall
   * back to the documented defaults.
   */
  private static parseNonNegativeInt(raw: string | undefined, fallback: number): number {
    const parsed = Number.parseInt(raw ?? '', 10);
    return Number.isFinite(parsed) && parsed >= 0 ? parsed : fallback;
  }

  /**
   * GET /api/jobs — lists jobs, optionally filtered by `pipelineId` and
   * `status`, sorted newest-first and paginated via `limit` (default 50)
   * and `offset` (default 0).
   */
  async listJobs(c: Context): Promise<Response> {
    try {
      const pipelineId = c.req.query('pipelineId');
      // NOTE(review): status is cast, not validated — an unknown status
      // string simply yields an empty result set.
      const status = c.req.query('status') as JobStatus;
      const limit = JobController.parseNonNegativeInt(c.req.query('limit'), 50);
      const offset = JobController.parseNonNegativeInt(c.req.query('offset'), 0);
      let jobs = this.orchestrator.listJobs(pipelineId);
      // Filter by status if provided
      if (status) {
        jobs = jobs.filter(job => job.status === status);
      }
      // Sort by creation time (newest first)
      jobs.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
      // Apply pagination
      const totalJobs = jobs.length;
      const paginatedJobs = jobs.slice(offset, offset + limit);
      return c.json({
        success: true,
        data: paginatedJobs,
        pagination: {
          total: totalJobs,
          limit,
          offset,
          hasMore: offset + limit < totalJobs
        }
      });
    } catch (error) {
      logger.error('Failed to list jobs:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to list jobs'
      }, 500);
    }
  }

  /** GET /api/jobs/:id — fetches a single job record. */
  async getJob(c: Context): Promise<Response> {
    try {
      const jobId = c.req.param('id');
      const job = this.orchestrator.getJob(jobId);
      if (!job) {
        return c.json({
          success: false,
          error: 'Job not found'
        }, 404);
      }
      return c.json({
        success: true,
        data: job
      });
    } catch (error) {
      logger.error('Failed to get job:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to get job'
      }, 500);
    }
  }

  /**
   * POST-style cancel of a pending or running job.
   * NOTE(review): mutates the in-memory job record directly; it does not
   * stop work already dispatched to the Bull queue.
   */
  async cancelJob(c: Context): Promise<Response> {
    try {
      const jobId = c.req.param('id');
      const job = this.orchestrator.getJob(jobId);
      if (!job) {
        return c.json({
          success: false,
          error: 'Job not found'
        }, 404);
      }
      if (job.status !== JobStatus.RUNNING && job.status !== JobStatus.PENDING) {
        return c.json({
          success: false,
          error: 'Job cannot be cancelled in current status'
        }, 400);
      }
      // Update job status to cancelled
      job.status = JobStatus.CANCELLED;
      job.completedAt = new Date();
      job.error = 'Job cancelled by user';
      logger.info(`Cancelled job: ${jobId}`);
      return c.json({
        success: true,
        message: 'Job cancelled successfully',
        data: job
      });
    } catch (error) {
      logger.error('Failed to cancel job:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to cancel job'
      }, 500);
    }
  }

  /** Re-runs a FAILED job by creating a fresh job with the same parameters. */
  async retryJob(c: Context): Promise<Response> {
    try {
      const jobId = c.req.param('id');
      const job = this.orchestrator.getJob(jobId);
      if (!job) {
        return c.json({
          success: false,
          error: 'Job not found'
        }, 404);
      }
      if (job.status !== JobStatus.FAILED) {
        return c.json({
          success: false,
          error: 'Only failed jobs can be retried'
        }, 400);
      }
      // Create a new job with the same parameters
      const newJob = await this.orchestrator.runPipeline(job.pipelineId, job.parameters);
      logger.info(`Retried job: ${jobId} as new job: ${newJob.id}`);
      return c.json({
        success: true,
        message: 'Job retried successfully',
        data: {
          originalJob: job,
          newJob: newJob
        }
      });
    } catch (error) {
      logger.error('Failed to retry job:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to retry job'
      }, 500);
    }
  }

  /**
   * GET /api/jobs/:id/logs — synthesizes lifecycle log lines from the job's
   * timestamps. NOTE(review): placeholder until a real log store is wired in.
   */
  async getJobLogs(c: Context): Promise<Response> {
    try {
      const jobId = c.req.param('id');
      const job = this.orchestrator.getJob(jobId);
      if (!job) {
        return c.json({
          success: false,
          error: 'Job not found'
        }, 404);
      }
      // In a real implementation, fetch logs from a log store
      const logs = [
        {
          timestamp: job.createdAt,
          level: 'info',
          message: `Job ${jobId} created`
        },
        ...(job.startedAt ? [{
          timestamp: job.startedAt,
          level: 'info',
          message: `Job ${jobId} started`
        }] : []),
        ...(job.completedAt ? [{
          timestamp: job.completedAt,
          level: job.status === JobStatus.COMPLETED ? 'info' : 'error',
          message: job.status === JobStatus.COMPLETED ?
            `Job ${jobId} completed successfully` :
            `Job ${jobId} failed: ${job.error}`
        }] : [])
      ];
      return c.json({
        success: true,
        data: {
          jobId,
          logs,
          totalLogs: logs.length
        }
      });
    } catch (error) {
      logger.error('Failed to get job logs:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to get job logs'
      }, 500);
    }
  }

  /**
   * GET /api/jobs/:id/metrics — the job's counters plus derived values
   * (duration, success/error rates as percentages; zero rates when no
   * records were processed, null duration until both timestamps exist).
   */
  async getJobMetrics(c: Context): Promise<Response> {
    try {
      const jobId = c.req.param('id');
      const job = this.orchestrator.getJob(jobId);
      if (!job) {
        return c.json({
          success: false,
          error: 'Job not found'
        }, 404);
      }
      const metrics = {
        ...job.metrics,
        duration: job.completedAt && job.startedAt ?
          job.completedAt.getTime() - job.startedAt.getTime() : null,
        successRate: job.metrics.recordsProcessed > 0 ?
          (job.metrics.recordsSuccessful / job.metrics.recordsProcessed) * 100 : 0,
        errorRate: job.metrics.recordsProcessed > 0 ?
          (job.metrics.recordsFailed / job.metrics.recordsProcessed) * 100 : 0,
        status: job.status,
        startedAt: job.startedAt,
        completedAt: job.completedAt
      };
      return c.json({
        success: true,
        data: metrics
      });
    } catch (error) {
      logger.error('Failed to get job metrics:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to get job metrics'
      }, 500);
    }
  }

  /** GET /api/jobs/stats — aggregate counts/rates across all known jobs plus the 10 most recent. */
  async getJobStats(c: Context): Promise<Response> {
    try {
      const jobs = this.orchestrator.listJobs();
      const stats = {
        total: jobs.length,
        byStatus: {
          pending: jobs.filter(j => j.status === JobStatus.PENDING).length,
          running: jobs.filter(j => j.status === JobStatus.RUNNING).length,
          completed: jobs.filter(j => j.status === JobStatus.COMPLETED).length,
          failed: jobs.filter(j => j.status === JobStatus.FAILED).length,
          cancelled: jobs.filter(j => j.status === JobStatus.CANCELLED).length,
        },
        metrics: {
          totalRecordsProcessed: jobs.reduce((sum, j) => sum + j.metrics.recordsProcessed, 0),
          totalRecordsSuccessful: jobs.reduce((sum, j) => sum + j.metrics.recordsSuccessful, 0),
          totalRecordsFailed: jobs.reduce((sum, j) => sum + j.metrics.recordsFailed, 0),
          averageProcessingTime: jobs.length > 0 ?
            jobs.reduce((sum, j) => sum + j.metrics.processingTimeMs, 0) / jobs.length : 0,
          successRate: jobs.length > 0 ?
            (jobs.filter(j => j.status === JobStatus.COMPLETED).length / jobs.length) * 100 : 0
        },
        recentJobs: jobs
          .sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime())
          .slice(0, 10)
          .map(job => ({
            id: job.id,
            pipelineId: job.pipelineId,
            status: job.status,
            createdAt: job.createdAt,
            processingTime: job.metrics.processingTimeMs,
            recordsProcessed: job.metrics.recordsProcessed
          }))
      };
      return c.json({
        success: true,
        data: stats
      });
    } catch (error) {
      logger.error('Failed to get job stats:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to get job stats'
      }, 500);
    }
  }
}

View file

@ -1,348 +0,0 @@
import { Context } from 'hono';
import { getLogger } from '@stock-bot/logger';
import { DataPipelineOrchestrator } from '../core/DataPipelineOrchestrator';
import { DataPipeline, PipelineStatus } from '../types/DataPipeline';
const logger = getLogger('pipeline-controller');
/**
 * HTTP handlers for pipeline CRUD and lifecycle operations
 * (run/schedule/pause/resume) plus per-pipeline metrics.
 * All responses use the `{ success, data | error }` envelope.
 */
export class PipelineController {
  constructor(private orchestrator: DataPipelineOrchestrator) {}

  /** GET /api/pipelines — all known pipeline definitions. */
  async listPipelines(c: Context): Promise<Response> {
    try {
      const pipelines = this.orchestrator.listPipelines();
      return c.json({
        success: true,
        data: pipelines,
        total: pipelines.length
      });
    } catch (error) {
      logger.error('Failed to list pipelines:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to list pipelines'
      }, 500);
    }
  }

  /** POST /api/pipelines — creates a pipeline; `name` is required. Returns 201. */
  async createPipeline(c: Context): Promise<Response> {
    try {
      const pipelineData = await c.req.json();
      // Validate required fields
      if (!pipelineData.name) {
        return c.json({
          success: false,
          error: 'Pipeline name is required'
        }, 400);
      }
      const pipeline = await this.orchestrator.createPipeline(pipelineData);
      logger.info(`Created pipeline: ${pipeline.name} (${pipeline.id})`);
      return c.json({
        success: true,
        data: pipeline
      }, 201);
    } catch (error) {
      logger.error('Failed to create pipeline:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to create pipeline'
      }, 500);
    }
  }

  /** GET /api/pipelines/:id — fetches a single pipeline definition. */
  async getPipeline(c: Context): Promise<Response> {
    try {
      const pipelineId = c.req.param('id');
      const pipeline = this.orchestrator.getPipeline(pipelineId);
      if (!pipeline) {
        return c.json({
          success: false,
          error: 'Pipeline not found'
        }, 404);
      }
      return c.json({
        success: true,
        data: pipeline
      });
    } catch (error) {
      logger.error('Failed to get pipeline:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to get pipeline'
      }, 500);
    }
  }

  /**
   * PUT /api/pipelines/:id — merges request fields over the existing pipeline.
   * NOTE(review): the merged object is not persisted back to the
   * orchestrator's store yet — this currently only echoes the merge result.
   */
  async updatePipeline(c: Context): Promise<Response> {
    try {
      const pipelineId = c.req.param('id');
      const updateData = await c.req.json();
      const existingPipeline = this.orchestrator.getPipeline(pipelineId);
      if (!existingPipeline) {
        return c.json({
          success: false,
          error: 'Pipeline not found'
        }, 404);
      }
      // Update pipeline (in a real implementation, this would use a proper update method)
      const updatedPipeline: DataPipeline = {
        ...existingPipeline,
        ...updateData,
        id: pipelineId, // Ensure ID doesn't change
        updatedAt: new Date()
      };
      // In a real implementation, save to persistent storage
      logger.info(`Updated pipeline: ${updatedPipeline.name} (${pipelineId})`);
      return c.json({
        success: true,
        data: updatedPipeline
      });
    } catch (error) {
      logger.error('Failed to update pipeline:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to update pipeline'
      }, 500);
    }
  }

  /**
   * DELETE /api/pipelines/:id — removes a pipeline definition.
   * Deletion is refused only while the pipeline has jobs that are still
   * pending or running; finished job records do not block deletion.
   */
  async deletePipeline(c: Context): Promise<Response> {
    try {
      const pipelineId = c.req.param('id');
      const pipeline = this.orchestrator.getPipeline(pipelineId);
      if (!pipeline) {
        return c.json({
          success: false,
          error: 'Pipeline not found'
        }, 404);
      }
      // Bug fix: previously ANY job record (even completed/failed/cancelled)
      // blocked deletion because listJobs() returns the full job history.
      // Only jobs that are actually still active should block.
      const activeJobs = this.orchestrator
        .listJobs(pipelineId)
        .filter(job => job.status === 'running' || job.status === 'pending');
      if (activeJobs.length > 0) {
        return c.json({
          success: false,
          error: 'Cannot delete pipeline with running jobs'
        }, 400);
      }
      // In a real implementation, delete from persistent storage
      logger.info(`Deleted pipeline: ${pipeline.name} (${pipelineId})`);
      return c.json({
        success: true,
        message: 'Pipeline deleted successfully'
      });
    } catch (error) {
      logger.error('Failed to delete pipeline:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to delete pipeline'
      }, 500);
    }
  }

  /**
   * POST /api/pipelines/:id/run — queues a job for an ACTIVE pipeline.
   * The request body (optional JSON) becomes the job parameters. Returns 202.
   */
  async runPipeline(c: Context): Promise<Response> {
    try {
      const pipelineId = c.req.param('id');
      // Missing/invalid body is treated as "no parameters".
      const parameters = await c.req.json().catch(() => ({}));
      const pipeline = this.orchestrator.getPipeline(pipelineId);
      if (!pipeline) {
        return c.json({
          success: false,
          error: 'Pipeline not found'
        }, 404);
      }
      if (pipeline.status !== PipelineStatus.ACTIVE) {
        return c.json({
          success: false,
          error: 'Pipeline is not active'
        }, 400);
      }
      const job = await this.orchestrator.runPipeline(pipelineId, parameters);
      logger.info(`Started pipeline job: ${job.id} for pipeline: ${pipelineId}`);
      return c.json({
        success: true,
        data: job
      }, 202);
    } catch (error) {
      logger.error('Failed to run pipeline:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to run pipeline'
      }, 500);
    }
  }

  /** POST /api/pipelines/:id/schedule — registers a cron schedule for the pipeline. */
  async schedulePipeline(c: Context): Promise<Response> {
    try {
      const pipelineId = c.req.param('id');
      const { cronExpression } = await c.req.json();
      if (!cronExpression) {
        return c.json({
          success: false,
          error: 'Cron expression is required'
        }, 400);
      }
      const pipeline = this.orchestrator.getPipeline(pipelineId);
      if (!pipeline) {
        return c.json({
          success: false,
          error: 'Pipeline not found'
        }, 404);
      }
      await this.orchestrator.schedulePipeline(pipelineId, cronExpression);
      logger.info(`Scheduled pipeline: ${pipelineId} with cron: ${cronExpression}`);
      return c.json({
        success: true,
        message: 'Pipeline scheduled successfully',
        data: {
          pipelineId,
          cronExpression
        }
      });
    } catch (error) {
      logger.error('Failed to schedule pipeline:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to schedule pipeline'
      }, 500);
    }
  }

  /**
   * POST /api/pipelines/:id/pause — marks the pipeline PAUSED.
   * NOTE(review): mutates the in-memory definition directly and does not
   * cancel already-queued jobs or the cron schedule.
   */
  async pausePipeline(c: Context): Promise<Response> {
    try {
      const pipelineId = c.req.param('id');
      const pipeline = this.orchestrator.getPipeline(pipelineId);
      if (!pipeline) {
        return c.json({
          success: false,
          error: 'Pipeline not found'
        }, 404);
      }
      // Update pipeline status to paused
      pipeline.status = PipelineStatus.PAUSED;
      pipeline.updatedAt = new Date();
      logger.info(`Paused pipeline: ${pipelineId}`);
      return c.json({
        success: true,
        message: 'Pipeline paused successfully',
        data: pipeline
      });
    } catch (error) {
      logger.error('Failed to pause pipeline:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to pause pipeline'
      }, 500);
    }
  }

  /** POST /api/pipelines/:id/resume — marks the pipeline ACTIVE again. */
  async resumePipeline(c: Context): Promise<Response> {
    try {
      const pipelineId = c.req.param('id');
      const pipeline = this.orchestrator.getPipeline(pipelineId);
      if (!pipeline) {
        return c.json({
          success: false,
          error: 'Pipeline not found'
        }, 404);
      }
      // Update pipeline status to active
      pipeline.status = PipelineStatus.ACTIVE;
      pipeline.updatedAt = new Date();
      logger.info(`Resumed pipeline: ${pipelineId}`);
      return c.json({
        success: true,
        message: 'Pipeline resumed successfully',
        data: pipeline
      });
    } catch (error) {
      logger.error('Failed to resume pipeline:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to resume pipeline'
      }, 500);
    }
  }

  /** GET /api/pipelines/:id/metrics — aggregates job counters for one pipeline. */
  async getPipelineMetrics(c: Context): Promise<Response> {
    try {
      const pipelineId = c.req.param('id');
      const pipeline = this.orchestrator.getPipeline(pipelineId);
      if (!pipeline) {
        return c.json({
          success: false,
          error: 'Pipeline not found'
        }, 404);
      }
      const jobs = this.orchestrator.listJobs(pipelineId);
      const metrics = {
        totalJobs: jobs.length,
        completedJobs: jobs.filter(j => j.status === 'completed').length,
        failedJobs: jobs.filter(j => j.status === 'failed').length,
        runningJobs: jobs.filter(j => j.status === 'running').length,
        totalRecordsProcessed: jobs.reduce((sum, j) => sum + j.metrics.recordsProcessed, 0),
        totalProcessingTime: jobs.reduce((sum, j) => sum + j.metrics.processingTimeMs, 0),
        averageProcessingTime: jobs.length > 0 ?
          jobs.reduce((sum, j) => sum + j.metrics.processingTimeMs, 0) / jobs.length : 0,
        successRate: jobs.length > 0 ?
          (jobs.filter(j => j.status === 'completed').length / jobs.length) * 100 : 0
      };
      return c.json({
        success: true,
        data: metrics
      });
    } catch (error) {
      logger.error('Failed to get pipeline metrics:', error);
      return c.json({
        success: false,
        error: error instanceof Error ? error.message : 'Failed to get pipeline metrics'
      }, 500);
    }
  }
}

View file

@ -1,299 +0,0 @@
import { EventBus, EventBusConfig } from '@stock-bot/event-bus';
import { DataPipelineEvent, DataJobEvent } from '@stock-bot/types';
import { getLogger } from '@stock-bot/logger';
import { DataPipeline, PipelineStatus, PipelineJob, JobStatus } from '../types/DataPipeline';
import { DataIngestionService } from '../services/DataIngestionService';
import { DataTransformationService } from '../services/DataTransformationService';
import { DataValidationService } from '../services/DataValidationService';
import { DataQualityService } from '../services/DataQualityService';
import { PipelineScheduler } from './PipelineScheduler';
import { JobQueue } from './JobQueue';
const logger = getLogger('data-pipeline-orchestrator');
/**
 * Central coordinator for data pipelines: owns pipeline definitions, queues
 * and executes jobs, publishes lifecycle events on the event bus, and wires
 * the scheduler + Bull job queue to the ingestion/transformation/validation/
 * quality step services.
 *
 * NOTE(review): pipelines and jobs live only in memory (loadPipelines() is a
 * stub), so all state is lost on process restart — confirm before relying on
 * job history.
 */
export class DataPipelineOrchestrator {
  private eventBus: EventBus;
  private scheduler: PipelineScheduler;
  private jobQueue: JobQueue;
  // Pipeline definitions keyed by pipeline id.
  private pipelines: Map<string, DataPipeline> = new Map();
  // Despite the name, this holds every job ever queued in this process
  // (completed/failed included), keyed by job id.
  private runningJobs: Map<string, PipelineJob> = new Map();

  constructor(
    private ingestionService: DataIngestionService,
    private transformationService: DataTransformationService,
    private validationService: DataValidationService,
    private qualityService: DataQualityService
  ) {
    // Event-bus connection settings come from the environment.
    const eventBusConfig: EventBusConfig = {
      redisHost: process.env.REDIS_HOST || 'localhost',
      redisPort: parseInt(process.env.REDIS_PORT || '6379'),
      redisPassword: process.env.REDIS_PASSWORD
    };
    this.eventBus = new EventBus(eventBusConfig);
    this.scheduler = new PipelineScheduler(this);
    this.jobQueue = new JobQueue(this);
  }

  /** Starts the scheduler and job queue, subscribes to events, loads pipelines. */
  async initialize(): Promise<void> {
    logger.info('🔄 Initializing Data Pipeline Orchestrator...');
    // EventBus doesn't have initialize method, it connects automatically
    await this.scheduler.initialize();
    await this.jobQueue.initialize();
    // Subscribe to pipeline events
    this.eventBus.subscribe('data.pipeline.*', this.handlePipelineEvent.bind(this));
    this.eventBus.subscribe('data.job.*', this.handleJobEvent.bind(this));
    // Load existing pipelines
    await this.loadPipelines();
    logger.info('✅ Data Pipeline Orchestrator initialized');
  }

  /**
   * Registers a new pipeline definition and publishes a PIPELINE_CREATED event.
   * The stored pipeline always starts in DRAFT status (any incoming status is
   * overridden), with server-generated id and timestamps.
   */
  async createPipeline(pipeline: Omit<DataPipeline, 'id' | 'createdAt' | 'updatedAt'>): Promise<DataPipeline> {
    const pipelineWithId: DataPipeline = {
      ...pipeline,
      id: this.generatePipelineId(),
      status: PipelineStatus.DRAFT,
      createdAt: new Date(),
      updatedAt: new Date(),
    };
    this.pipelines.set(pipelineWithId.id, pipelineWithId);
    await this.eventBus.publish('data.pipeline.created', {
      type: 'PIPELINE_CREATED',
      pipelineId: pipelineWithId.id,
      pipelineName: pipelineWithId.name,
      timestamp: new Date()
    } as DataPipelineEvent);
    logger.info(`📋 Created pipeline: ${pipelineWithId.name} (${pipelineWithId.id})`);
    return pipelineWithId;
  }

  /**
   * Creates a PENDING job for an ACTIVE pipeline and enqueues it for
   * asynchronous execution. Throws when the pipeline is missing or inactive.
   * @returns the job record (execution happens later via the queue).
   */
  async runPipeline(pipelineId: string, parameters?: Record<string, any>): Promise<PipelineJob> {
    const pipeline = this.pipelines.get(pipelineId);
    if (!pipeline) {
      throw new Error(`Pipeline not found: ${pipelineId}`);
    }
    if (pipeline.status !== PipelineStatus.ACTIVE) {
      throw new Error(`Pipeline is not active: ${pipeline.status}`);
    }
    const job: PipelineJob = {
      id: this.generateJobId(),
      pipelineId,
      status: JobStatus.PENDING,
      parameters: parameters || {},
      createdAt: new Date(),
      startedAt: null,
      completedAt: null,
      error: null,
      metrics: {
        recordsProcessed: 0,
        recordsSuccessful: 0,
        recordsFailed: 0,
        processingTimeMs: 0,
      },
    };
    this.runningJobs.set(job.id, job);
    // Queue the job for execution
    await this.jobQueue.enqueueJob(job);
    // NOTE(review): the queued event reuses type 'JOB_STARTED' — presumably a
    // placeholder; confirm whether a distinct JOB_QUEUED type is intended.
    await this.eventBus.publish('data.job.queued', {
      type: 'JOB_STARTED',
      jobId: job.id,
      pipelineId,
      timestamp: new Date()
    } as DataJobEvent);
    logger.info(`🚀 Queued pipeline job: ${job.id} for pipeline: ${pipeline.name}`);
    return job;
  }

  /**
   * Executes one queued job: runs the pipeline's steps in fixed order
   * (ingestion → transformation → validation → quality checks), mutating the
   * job record in place and publishing started/completed/failed events.
   * Rethrows on failure so the queue's retry/backoff policy applies.
   */
  async executePipelineJob(job: PipelineJob): Promise<void> {
    const pipeline = this.pipelines.get(job.pipelineId);
    if (!pipeline) {
      throw new Error(`Pipeline not found: ${job.pipelineId}`);
    }
    const startTime = Date.now();
    job.status = JobStatus.RUNNING;
    job.startedAt = new Date();
    await this.eventBus.publish('data.job.started', {
      type: 'JOB_STARTED',
      jobId: job.id,
      pipelineId: job.pipelineId,
      timestamp: new Date()
    } as DataJobEvent);
    try {
      logger.info(`⚙️ Executing pipeline job: ${job.id}`);
      // Execute pipeline steps
      await this.executeIngestionStep(pipeline, job);
      await this.executeTransformationStep(pipeline, job);
      await this.executeValidationStep(pipeline, job);
      await this.executeQualityChecks(pipeline, job);
      // Complete the job
      job.status = JobStatus.COMPLETED;
      job.completedAt = new Date();
      job.metrics.processingTimeMs = Date.now() - startTime;
      await this.eventBus.publish('data.job.completed', {
        type: 'JOB_COMPLETED',
        jobId: job.id,
        pipelineId: job.pipelineId,
        timestamp: new Date()
      } as DataJobEvent);
      logger.info(`✅ Pipeline job completed: ${job.id} in ${job.metrics.processingTimeMs}ms`);
    } catch (error) {
      job.status = JobStatus.FAILED;
      job.completedAt = new Date();
      job.error = error instanceof Error ? error.message : 'Unknown error';
      job.metrics.processingTimeMs = Date.now() - startTime;
      await this.eventBus.publish('data.job.failed', {
        type: 'JOB_FAILED',
        jobId: job.id,
        pipelineId: job.pipelineId,
        error: job.error,
        timestamp: new Date()
      } as DataJobEvent);
      logger.error(`❌ Pipeline job failed: ${job.id}`, error);
      throw error;
    }
  }

  /** Runs the ingestion step (if configured) and folds its counters into the job metrics. */
  private async executeIngestionStep(pipeline: DataPipeline, job: PipelineJob): Promise<void> {
    if (!pipeline.steps.ingestion) return;
    logger.info(`📥 Executing ingestion step for job: ${job.id}`);
    const result = await this.ingestionService.ingestData(
      pipeline.steps.ingestion,
      job.parameters
    );
    job.metrics.recordsProcessed += result.recordsProcessed;
    job.metrics.recordsSuccessful += result.recordsSuccessful;
    job.metrics.recordsFailed += result.recordsFailed;
  }

  /** Runs the transformation step (if configured); counters accumulate into job metrics. */
  private async executeTransformationStep(pipeline: DataPipeline, job: PipelineJob): Promise<void> {
    if (!pipeline.steps.transformation) return;
    logger.info(`🔄 Executing transformation step for job: ${job.id}`);
    const result = await this.transformationService.transformData(
      pipeline.steps.transformation,
      job.parameters
    );
    job.metrics.recordsProcessed += result.recordsProcessed;
    job.metrics.recordsSuccessful += result.recordsSuccessful;
    job.metrics.recordsFailed += result.recordsFailed;
  }

  /** Runs the validation step (if configured); counters accumulate into job metrics. */
  private async executeValidationStep(pipeline: DataPipeline, job: PipelineJob): Promise<void> {
    if (!pipeline.steps.validation) return;
    logger.info(`✅ Executing validation step for job: ${job.id}`);
    const result = await this.validationService.validateData(
      pipeline.steps.validation,
      job.parameters
    );
    job.metrics.recordsProcessed += result.recordsProcessed;
    job.metrics.recordsSuccessful += result.recordsSuccessful;
    job.metrics.recordsFailed += result.recordsFailed;
  }

  /** Runs quality checks (if configured); does not contribute to job record counters. */
  private async executeQualityChecks(pipeline: DataPipeline, job: PipelineJob): Promise<void> {
    if (!pipeline.steps.qualityChecks) return;
    logger.info(`🔍 Executing quality checks for job: ${job.id}`);
    await this.qualityService.runQualityChecks(
      pipeline.steps.qualityChecks,
      job.parameters
    );
  }

  /**
   * Registers a cron schedule for the pipeline (delegated to the scheduler),
   * records it on the pipeline definition, and publishes an event.
   * NOTE(review): the event reuses type 'PIPELINE_STARTED' — confirm whether a
   * dedicated PIPELINE_SCHEDULED type is intended.
   */
  async schedulePipeline(pipelineId: string, cronExpression: string): Promise<void> {
    const pipeline = this.pipelines.get(pipelineId);
    if (!pipeline) {
      throw new Error(`Pipeline not found: ${pipelineId}`);
    }
    await this.scheduler.schedulePipeline(pipelineId, cronExpression);
    pipeline.schedule = {
      cronExpression,
      enabled: true,
      lastRun: null,
      nextRun: this.scheduler.getNextRunTime(cronExpression),
    };
    await this.eventBus.publish('data.pipeline.scheduled', {
      type: 'PIPELINE_STARTED',
      pipelineId,
      pipelineName: pipeline.name,
      timestamp: new Date()
    } as DataPipelineEvent);
    logger.info(`📅 Scheduled pipeline: ${pipeline.name} with cron: ${cronExpression}`);
  }

  // Pipeline CRUD operations

  /** Looks up one pipeline definition by id. */
  getPipeline(pipelineId: string): DataPipeline | undefined {
    return this.pipelines.get(pipelineId);
  }

  /** Returns all known pipeline definitions. */
  listPipelines(): DataPipeline[] {
    return Array.from(this.pipelines.values());
  }

  /** Looks up one job record by id (any status, not just running). */
  getJob(jobId: string): PipelineJob | undefined {
    return this.runningJobs.get(jobId);
  }

  /** Returns all job records, optionally narrowed to one pipeline. */
  listJobs(pipelineId?: string): PipelineJob[] {
    const jobs = Array.from(this.runningJobs.values());
    return pipelineId ? jobs.filter(job => job.pipelineId === pipelineId) : jobs;
  }

  /** Event-bus callback for pipeline-level events (currently log-only). */
  private async handlePipelineEvent(event: any): Promise<void> {
    logger.debug('📨 Received pipeline event:', event);
    // Handle pipeline-level events
  }

  /** Event-bus callback for job-level events (currently log-only). */
  private async handleJobEvent(event: any): Promise<void> {
    logger.debug('📨 Received job event:', event);
    // Handle job-level events
  }

  /** Stub: would hydrate `pipelines` from persistent storage at startup. */
  private async loadPipelines(): Promise<void> {
    // In a real implementation, load pipelines from persistent storage
    logger.info('📂 Loading existing pipelines...');
  }

  // NOTE(review): String.prototype.substr is deprecated; slice(2, 11) would be
  // equivalent here. Ids are time+random, not collision-proof — acceptable for
  // in-memory use only.
  private generatePipelineId(): string {
    return `pipeline_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
  }

  private generateJobId(): string {
    return `job_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
  }

  /** Stops scheduler and queue, then closes the event-bus connection. */
  async shutdown(): Promise<void> {
    logger.info('🔄 Shutting down Data Pipeline Orchestrator...');
    await this.scheduler.shutdown();
    await this.jobQueue.shutdown();
    await this.eventBus.close();
    logger.info('✅ Data Pipeline Orchestrator shutdown complete');
  }
}

View file

@ -1,79 +0,0 @@
import Queue from 'bull';
import { getLogger } from '@stock-bot/logger';
import { PipelineJob } from '../types/DataPipeline';
import { DataPipelineOrchestrator } from './DataPipelineOrchestrator';
const logger = getLogger('job-queue');
/**
 * Redis-backed (Bull) work queue for pipeline jobs. Jobs are enqueued by the
 * orchestrator and processed with bounded concurrency; actual execution is
 * delegated back to DataPipelineOrchestrator.executePipelineJob.
 */
export class JobQueue {
  private queue: Queue.Queue;

  constructor(private orchestrator: DataPipelineOrchestrator) {
    this.queue = new Queue('data-pipeline-jobs', {
      redis: {
        host: process.env.REDIS_HOST || 'localhost',
        port: parseInt(process.env.REDIS_PORT || '6379'),
      },
    });
  }

  /** Registers the job processor (max 5 concurrent) and lifecycle logging. */
  async initialize(): Promise<void> {
    logger.info('🔄 Initializing Job Queue...');
    // Process jobs with a maximum of 5 concurrent jobs
    this.queue.process('pipeline-job', 5, async (job) => {
      const pipelineJob: PipelineJob = job.data;
      await this.orchestrator.executePipelineJob(pipelineJob);
    });
    // Handle job events
    this.queue.on('completed', (job) => {
      logger.info(`✅ Job completed: ${job.id}`);
    });
    this.queue.on('failed', (job, error) => {
      logger.error(`❌ Job failed: ${job.id}`, error);
    });
    this.queue.on('stalled', (job) => {
      logger.warn(`⚠️ Job stalled: ${job.id}`);
    });
    logger.info('✅ Job Queue initialized');
  }

  /**
   * Adds a pipeline job to the queue with retry/backoff and bounded history.
   * The orchestrator-generated job id is reused as the Bull job id so the
   * two systems stay correlated.
   */
  async enqueueJob(job: PipelineJob): Promise<void> {
    await this.queue.add('pipeline-job', job, {
      jobId: job.id,
      removeOnComplete: 100, // Keep last 100 completed jobs
      removeOnFail: 50, // Keep last 50 failed jobs
      attempts: 3, // Retry failed jobs up to 3 times
      backoff: {
        type: 'exponential',
        delay: 2000,
      },
    });
    logger.info(`📤 Enqueued job: ${job.id}`);
  }

  /**
   * Returns queue-depth counters ({ waiting, active, completed, failed }).
   * Uses Bull's getJobCounts() — a handful of Redis counter reads — instead
   * of the previous approach of fetching every job object from Redis just to
   * take `.length` on the arrays.
   */
  async getJobStats(): Promise<any> {
    const counts = await this.queue.getJobCounts();
    return {
      waiting: counts.waiting,
      active: counts.active,
      completed: counts.completed,
      failed: counts.failed,
    };
  }

  /** Gracefully closes the underlying Bull queue and its Redis connections. */
  async shutdown(): Promise<void> {
    logger.info('🔄 Shutting down Job Queue...');
    await this.queue.close();
    logger.info('✅ Job Queue shutdown complete');
  }
}

View file

@ -1,70 +0,0 @@
import { CronJob } from 'cron';
import { getLogger } from '@stock-bot/logger';
import { DataPipelineOrchestrator } from './DataPipelineOrchestrator';
const logger = getLogger('pipeline-scheduler');
/**
 * Manages cron-based schedules for pipelines. Each schedule fires in UTC and
 * delegates execution to the orchestrator; at most one schedule exists per
 * pipeline at a time.
 */
export class PipelineScheduler {
  // pipelineId -> active cron job
  private scheduledJobs: Map<string, CronJob> = new Map();

  constructor(private orchestrator: DataPipelineOrchestrator) {}

  async initialize(): Promise<void> {
    logger.info('🔄 Initializing Pipeline Scheduler...');
    logger.info('✅ Pipeline Scheduler initialized');
  }

  /** Registers a cron schedule for a pipeline, replacing any existing one. */
  async schedulePipeline(pipelineId: string, cronExpression: string): Promise<void> {
    // Replacing: cancelSchedule is a no-op when no schedule exists.
    this.cancelSchedule(pipelineId);
    const fire = async (): Promise<void> => {
      try {
        logger.info(`⏰ Scheduled execution triggered for pipeline: ${pipelineId}`);
        await this.orchestrator.runPipeline(pipelineId);
      } catch (error) {
        logger.error(`❌ Scheduled pipeline execution failed: ${pipelineId}`, error);
      }
    };
    // Started immediately; cron times are evaluated in UTC.
    const cronJob = new CronJob(cronExpression, fire, null, true, 'UTC');
    this.scheduledJobs.set(pipelineId, cronJob);
    logger.info(`📅 Scheduled pipeline ${pipelineId} with cron: ${cronExpression}`);
  }

  /** Stops and removes a pipeline's schedule, if one exists. */
  cancelSchedule(pipelineId: string): void {
    const existing = this.scheduledJobs.get(pipelineId);
    if (!existing) {
      return;
    }
    existing.stop();
    this.scheduledJobs.delete(pipelineId);
    logger.info(`🚫 Cancelled schedule for pipeline: ${pipelineId}`);
  }

  /** Computes the next fire time for a cron expression (the probe job is never started). */
  getNextRunTime(cronExpression: string): Date {
    return new CronJob(cronExpression, () => {}, null, false).nextDate().toJSDate();
  }

  /** Ids of all pipelines that currently have an active schedule. */
  getScheduledPipelines(): string[] {
    return [...this.scheduledJobs.keys()];
  }

  /** Stops every active schedule and clears the registry. */
  async shutdown(): Promise<void> {
    logger.info('🔄 Shutting down Pipeline Scheduler...');
    for (const [pipelineId, cronJob] of this.scheduledJobs.entries()) {
      cronJob.stop();
      logger.info(`🚫 Stopped scheduled job for pipeline: ${pipelineId}`);
    }
    this.scheduledJobs.clear();
    logger.info('✅ Pipeline Scheduler shutdown complete');
  }
}

View file

@ -1,119 +0,0 @@
import { Hono } from 'hono';
import { serve } from 'bun';
import { getLogger, loggingMiddleware, errorLoggingMiddleware } from '@stock-bot/logger';
import { DataPipelineOrchestrator } from './core/DataPipelineOrchestrator';
import { DataQualityService } from './services/DataQualityService';
import { DataIngestionService } from './services/DataIngestionService';
import { DataTransformationService } from './services/DataTransformationService';
import { DataValidationService } from './services/DataValidationService';
import { HealthController } from './controllers/HealthController';
import { PipelineController } from './controllers/PipelineController';
import { JobController } from './controllers/JobController';
// ---------------------------------------------------------------------------
// Data Processor service wiring: HTTP app, middleware, services, controllers,
// and route registration. These module-level consts are referenced by the
// bootstrap code further down, so their names are part of the module contract.
// ---------------------------------------------------------------------------
const app = new Hono();
// Service-scoped logger; '/health' is excluded from request logging below
// to keep liveness probes out of the logs.
const logger = getLogger('data-processor');
// Request logging middleware (applies to every route).
app.use('*', loggingMiddleware({
serviceName: 'data-processor',
skipPaths: ['/health']
}));
// Error logging middleware — must be registered for all routes as well.
app.use('*', errorLoggingMiddleware(logger));
// Leaf services: each handles one pipeline stage.
const dataQualityService = new DataQualityService();
const dataIngestionService = new DataIngestionService();
const dataTransformationService = new DataTransformationService();
const dataValidationService = new DataValidationService();
// Core orchestrator — coordinates the four stage services; constructor
// argument order matters (ingestion, transformation, validation, quality).
const pipelineOrchestrator = new DataPipelineOrchestrator(
dataIngestionService,
dataTransformationService,
dataValidationService,
dataQualityService
);
// HTTP controllers. Handlers are .bind()-ed below so `this` resolves to the
// controller instance when Hono invokes them.
const healthController = new HealthController();
const pipelineController = new PipelineController(pipelineOrchestrator);
const jobController = new JobController(pipelineOrchestrator);
// Health endpoints (liveness + detailed diagnostics).
app.get('/health', healthController.getHealth.bind(healthController));
app.get('/health/detailed', healthController.getDetailedHealth.bind(healthController));
// Pipeline management: CRUD plus run/schedule/pause/resume/metrics actions.
app.get('/api/pipelines', pipelineController.listPipelines.bind(pipelineController));
app.post('/api/pipelines', pipelineController.createPipeline.bind(pipelineController));
app.get('/api/pipelines/:id', pipelineController.getPipeline.bind(pipelineController));
app.put('/api/pipelines/:id', pipelineController.updatePipeline.bind(pipelineController));
app.delete('/api/pipelines/:id', pipelineController.deletePipeline.bind(pipelineController));
app.post('/api/pipelines/:id/run', pipelineController.runPipeline.bind(pipelineController));
app.post('/api/pipelines/:id/schedule', pipelineController.schedulePipeline.bind(pipelineController));
app.post('/api/pipelines/:id/pause', pipelineController.pausePipeline.bind(pipelineController));
app.post('/api/pipelines/:id/resume', pipelineController.resumePipeline.bind(pipelineController));
app.get('/api/pipelines/:id/metrics', pipelineController.getPipelineMetrics.bind(pipelineController));
// Job management: read-only inspection plus cancel/retry actions.
// NOTE: '/api/jobs/stats' must stay registered before '/api/jobs/:id' so the
// literal segment is matched ahead of the parameterized one.
app.get('/api/jobs', jobController.listJobs.bind(jobController));
app.get('/api/jobs/stats', jobController.getJobStats.bind(jobController));
app.get('/api/jobs/:id', jobController.getJob.bind(jobController));
app.get('/api/jobs/:id/logs', jobController.getJobLogs.bind(jobController));
app.get('/api/jobs/:id/metrics', jobController.getJobMetrics.bind(jobController));
app.post('/api/jobs/:id/cancel', jobController.cancelJob.bind(jobController));
app.post('/api/jobs/:id/retry', jobController.retryJob.bind(jobController));
// Data quality endpoints — thin inline handlers over DataQualityService.
app.get('/api/data-quality/metrics', async (c) => {
const metrics = await dataQualityService.getQualityMetrics();
return c.json({ success: true, data: metrics });
});
app.get('/api/data-quality/report/:dataset', async (c) => {
const dataset = c.req.param('dataset');
const report = await dataQualityService.generateReport(dataset);
return c.json({ success: true, data: report });
});
// Listen port — configurable via DATA_PROCESSOR_PORT; explicit radix 10 so a
// value like '08' cannot be misread.
const PORT = parseInt(process.env.DATA_PROCESSOR_PORT || '5001', 10);

/**
 * Initializes all services in dependency order (leaf stage services first,
 * orchestrator last). Exits the process with code 1 on any failure, since
 * the data processor cannot operate partially initialized.
 */
async function initializeServices(): Promise<void> {
  try {
    logger.info('🔄 Initializing Data Processor services...');
    await dataQualityService.initialize();
    await dataIngestionService.initialize();
    await dataTransformationService.initialize();
    await dataValidationService.initialize();
    await pipelineOrchestrator.initialize();
    logger.info('✅ Data Processor services initialized successfully');
  } catch (error) {
    logger.error('❌ Failed to initialize Data Processor services:', error);
    process.exit(1);
  }
}

/** Drains the orchestrator (stops schedules/jobs) and exits cleanly. */
async function gracefulShutdown(): Promise<void> {
  logger.info('🔄 Gracefully shutting down Data Processor...');
  await pipelineOrchestrator.shutdown();
  process.exit(0);
}

// Graceful shutdown on SIGINT (Ctrl-C) and SIGTERM. The original handled only
// SIGINT; SIGTERM is what Docker/Kubernetes send on stop, so without this
// handler a container shutdown would skip orchestrator cleanup entirely.
process.on('SIGINT', () => void gracefulShutdown());
process.on('SIGTERM', () => void gracefulShutdown());

// Start the HTTP server only after all services are ready.
// initializeServices() never rejects: it exits the process on failure.
initializeServices().then(() => {
  serve({
    port: PORT,
    fetch: app.fetch,
  });
  logger.info(`🚀 Data Processor running on port ${PORT}`);
  logger.info(`🔍 Health check: http://localhost:${PORT}/health`);
  logger.info(`📊 API documentation: http://localhost:${PORT}/api`);
});

View file

@ -1,200 +0,0 @@
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('DataIngestionService');
import { IngestionStep, ProcessingResult, DataSource } from '../types/DataPipeline';
import axios from 'axios';
import csv from 'csv-parser';
import * as fs from 'fs';
/**
 * Pulls raw records into the pipeline from external sources: HTTP APIs and
 * local CSV/JSON files are implemented; database and stream sources are
 * explicit stubs that throw. Every ingest path resolves to a
 * ProcessingResult summarizing record counts, per-record errors, and
 * source metadata — ingestData() itself never rejects.
 */
export class DataIngestionService {
// Reserved for pooled source connections. Nothing populates it yet; it is
// only surfaced (as a size) by getIngestionMetrics().
private activeConnections: Map<string, any> = new Map();
// No async setup is needed today; kept async for interface symmetry with
// the sibling pipeline services.
async initialize(): Promise<void> {
logger.info('🔄 Initializing Data Ingestion Service...');
logger.info('✅ Data Ingestion Service initialized');
}
/**
 * Dispatches to the source-type-specific ingest routine.
 * Any thrown error (including the "not yet implemented" stubs) is converted
 * into a failed ProcessingResult carrying a single INGESTION_ERROR entry.
 */
async ingestData(step: IngestionStep, parameters: Record<string, any>): Promise<ProcessingResult> {
const startTime = Date.now();
logger.info(`📥 Starting data ingestion from ${step.source.type}: ${step.source.connection.url || step.source.connection.host}`);
try {
switch (step.source.type) {
case 'api':
return await this.ingestFromApi(step.source, parameters);
case 'file':
return await this.ingestFromFile(step.source, parameters);
case 'database':
return await this.ingestFromDatabase(step.source, parameters);
case 'stream':
return await this.ingestFromStream(step.source, parameters);
default:
throw new Error(`Unsupported ingestion type: ${step.source.type}`);
}
} catch (error) {
const processingTime = Date.now() - startTime;
logger.error(`❌ Data ingestion failed after ${processingTime}ms:`, error);
return {
recordsProcessed: 0,
recordsSuccessful: 0,
recordsFailed: 0,
errors: [{
record: 0,
message: error instanceof Error ? error.message : 'Unknown error',
code: 'INGESTION_ERROR'
}],
metadata: { processingTimeMs: processingTime }
};
}
}
// Fetches records over HTTP (always GET). Pipeline `parameters` are merged
// over the source's configured query params; an apiKey, if present, is sent
// as a Bearer token. The response body is unwrapped from the common
// envelopes `data.data` / `data.results`, otherwise wrapped in a 1-element
// array. NOTE(review): no timeout is set on the axios call — a hung
// endpoint blocks the pipeline step; confirm whether one is needed.
private async ingestFromApi(source: DataSource, parameters: Record<string, any>): Promise<ProcessingResult> {
const config = {
method: 'GET',
url: source.connection.url,
headers: source.connection.headers || {},
params: { ...source.connection.params, ...parameters },
};
if (source.connection.apiKey) {
config.headers['Authorization'] = `Bearer ${source.connection.apiKey}`;
}
const response = await axios(config);
const data = response.data;
// Process the data based on format
let records: any[] = [];
if (Array.isArray(data)) {
records = data;
} else if (data.data && Array.isArray(data.data)) {
records = data.data;
} else if (data.results && Array.isArray(data.results)) {
records = data.results;
} else {
records = [data];
}
logger.info(`📊 Ingested ${records.length} records from API: ${source.connection.url}`);
return {
recordsProcessed: records.length,
recordsSuccessful: records.length,
recordsFailed: 0,
errors: [],
metadata: {
source: 'api',
url: source.connection.url,
statusCode: response.status,
responseSize: JSON.stringify(data).length
}
};
}
// Reads a local file; the path comes from the source config or, as a
// fallback, the per-run parameters. Only 'csv' and 'json' formats are
// implemented.
private async ingestFromFile(source: DataSource, parameters: Record<string, any>): Promise<ProcessingResult> {
const filePath = source.connection.url || parameters.filePath;
if (!filePath) {
throw new Error('File path is required for file ingestion');
}
switch (source.format) {
case 'csv':
return await this.ingestCsvFile(filePath);
case 'json':
return await this.ingestJsonFile(filePath);
default:
throw new Error(`Unsupported file format: ${source.format}`);
}
}
// Streams a CSV file through csv-parser, collecting one object per row.
// Rejects (propagating to ingestData's catch) on stream errors such as a
// missing file. The per-row try/catch feeds the errors array, though a bare
// push into `records` is not expected to throw in practice.
private async ingestCsvFile(filePath: string): Promise<ProcessingResult> {
return new Promise((resolve, reject) => {
const records: any[] = [];
const errors: any[] = [];
let recordCount = 0; fs.createReadStream(filePath)
.pipe(csv())
.on('data', (data: any) => {
recordCount++;
try {
records.push(data);
} catch (error) {
errors.push({
record: recordCount,
message: error instanceof Error ? error.message : 'Parse error',
code: 'CSV_PARSE_ERROR'
});
}
})
.on('end', () => {
logger.info(`📊 Ingested ${records.length} records from CSV: ${filePath}`);
resolve({
recordsProcessed: recordCount,
recordsSuccessful: records.length,
recordsFailed: errors.length,
errors,
metadata: {
source: 'file',
format: 'csv',
filePath
}
});
})
.on('error', reject);
});
}
// Reads and parses a whole JSON file into memory (no streaming); a
// top-level array yields its elements, any other value becomes a single
// record. Parse errors propagate to ingestData's catch.
private async ingestJsonFile(filePath: string): Promise<ProcessingResult> {
const fileContent = await fs.promises.readFile(filePath, 'utf8');
const data = JSON.parse(fileContent);
let records: any[] = [];
if (Array.isArray(data)) {
records = data;
} else {
records = [data];
}
logger.info(`📊 Ingested ${records.length} records from JSON: ${filePath}`);
return {
recordsProcessed: records.length,
recordsSuccessful: records.length,
recordsFailed: 0,
errors: [],
metadata: {
source: 'file',
format: 'json',
filePath,
fileSize: fileContent.length
}
};
}
private async ingestFromDatabase(source: DataSource, parameters: Record<string, any>): Promise<ProcessingResult> {
// Placeholder for database ingestion
// In a real implementation, this would connect to various databases
// (PostgreSQL, MySQL, MongoDB, etc.) and execute queries
throw new Error('Database ingestion not yet implemented');
}
private async ingestFromStream(source: DataSource, parameters: Record<string, any>): Promise<ProcessingResult> {
// Placeholder for stream ingestion
// In a real implementation, this would connect to streaming sources
// (Kafka, Kinesis, WebSocket, etc.)
throw new Error('Stream ingestion not yet implemented');
}
// Static capability report; note the format list advertises more formats
// (xml, parquet, avro) than ingestFromFile currently implements.
async getIngestionMetrics(): Promise<any> {
return {
activeConnections: this.activeConnections.size,
supportedSources: ['api', 'file', 'database', 'stream'],
supportedFormats: ['json', 'csv', 'xml', 'parquet', 'avro']
};
}
}

View file

@ -1,376 +0,0 @@
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('DataQualityService');
import { QualityCheckStep, ProcessingResult, QualityCheck, QualityThresholds } from '../types/DataPipeline';
/**
 * Runs configurable quality checks (completeness, accuracy, consistency,
 * validity, uniqueness) over in-memory record batches, scores each check
 * 0-100, and keeps a bounded in-memory history of run metrics plus
 * per-dataset reports. All state is process-local and lost on restart.
 */
export class DataQualityService {
// Rolling history of run metrics, keyed by `metrics_<timestamp>`; trimmed
// to the most recent 100 entries by storeQualityMetrics().
private qualityMetrics: Map<string, any> = new Map();
// Last generated report per dataset name.
private qualityReports: Map<string, any> = new Map();
async initialize(): Promise<void> {
logger.info('🔄 Initializing Data Quality Service...');
// Initialize quality metrics storage
this.qualityMetrics.clear();
this.qualityReports.clear();
logger.info('✅ Data Quality Service initialized');
}
/**
 * Executes every check in the step against `parameters.inputData`.
 * A check scoring below thresholds.error adds an error entry (which marks
 * the whole batch failed); below thresholds.warning only logs a warning.
 * Never rejects — unexpected failures become a QUALITY_SERVICE_ERROR result.
 * Note the all-or-nothing accounting: recordsSuccessful/recordsFailed flip
 * the entire batch based on whether any error-threshold breach occurred.
 */
async runQualityChecks(step: QualityCheckStep, parameters: Record<string, any>): Promise<ProcessingResult> {
const startTime = Date.now();
logger.info(`🔍 Running ${step.checks.length} quality checks`);
const inputData = parameters.inputData || [];
const results: any[] = [];
const errors: any[] = [];
let totalScore = 0;
try {
for (const check of step.checks) {
const checkResult = await this.executeQualityCheck(check, inputData);
results.push(checkResult);
totalScore += checkResult.score;
// Check if the quality score meets thresholds
if (checkResult.score < step.thresholds.error) {
errors.push({
record: 0,
field: check.field,
message: `Quality check failed: ${check.name} scored ${checkResult.score}%, below error threshold ${step.thresholds.error}%`,
code: 'QUALITY_CHECK_ERROR'
});
} else if (checkResult.score < step.thresholds.warning) {
logger.warn(`⚠️ Quality warning: ${check.name} scored ${checkResult.score}%, below warning threshold ${step.thresholds.warning}%`);
}
}
const averageScore = totalScore / step.checks.length;
const processingTime = Date.now() - startTime;
// Store quality metrics
this.storeQualityMetrics({
timestamp: new Date(),
averageScore,
checksRun: step.checks.length,
results,
processingTimeMs: processingTime
});
logger.info(`🔍 Quality checks completed: ${averageScore.toFixed(2)}% average score in ${processingTime}ms`);
return {
recordsProcessed: inputData.length,
recordsSuccessful: errors.length === 0 ? inputData.length : 0,
recordsFailed: errors.length > 0 ? inputData.length : 0,
errors,
metadata: {
qualityScore: averageScore,
checksRun: step.checks.length,
results,
processingTimeMs: processingTime
}
};
} catch (error) {
const processingTime = Date.now() - startTime;
logger.error(`❌ Quality checks failed after ${processingTime}ms:`, error);
return {
recordsProcessed: inputData.length,
recordsSuccessful: 0,
recordsFailed: inputData.length,
errors: [{
record: 0,
message: error instanceof Error ? error.message : 'Unknown quality check error',
code: 'QUALITY_SERVICE_ERROR'
}],
metadata: { processingTimeMs: processingTime }
};
}
}
// Dispatches a single check to its type-specific scorer.
private async executeQualityCheck(check: QualityCheck, data: any[]): Promise<any> {
switch (check.type) {
case 'completeness':
return this.checkCompleteness(check, data);
case 'accuracy':
return this.checkAccuracy(check, data);
case 'consistency':
return this.checkConsistency(check, data);
case 'validity':
return this.checkValidity(check, data);
case 'uniqueness':
return this.checkUniqueness(check, data);
default:
throw new Error(`Unsupported quality check type: ${check.type}`);
}
}
// Score = % of records whose field is neither null, undefined, nor ''.
// Empty input scores 100 (vacuously complete).
private checkCompleteness(check: QualityCheck, data: any[]): any {
if (!check.field) {
throw new Error('Completeness check requires a field');
}
const totalRecords = data.length;
const completeRecords = data.filter(record => {
const value = this.getFieldValue(record, check.field!);
return value !== null && value !== undefined && value !== '';
}).length;
const score = totalRecords > 0 ? (completeRecords / totalRecords) * 100 : 100;
return {
checkName: check.name,
type: 'completeness',
field: check.field,
score,
passed: score >= check.threshold,
details: {
totalRecords,
completeRecords,
missingRecords: totalRecords - completeRecords
}
};
}
// Stubbed scorer: always reports a fixed 95% — no real reference-data
// validation is performed yet.
private checkAccuracy(check: QualityCheck, data: any[]): any {
// Placeholder for accuracy checks
// In a real implementation, this would validate data against known references
// or business rules specific to stock market data
const score = 95; // Mock score
return {
checkName: check.name,
type: 'accuracy',
field: check.field,
score,
passed: score >= check.threshold,
details: {
validatedRecords: data.length,
accurateRecords: Math.floor(data.length * 0.95)
}
};
}
// For field 'symbol': % of values matching the 1-5 uppercase-letter format.
// For any other field: 100 if all values share one typeof, else 0.
private checkConsistency(check: QualityCheck, data: any[]): any {
if (!check.field) {
throw new Error('Consistency check requires a field');
}
// Check for consistent data types and formats
const fieldValues = data.map(record => this.getFieldValue(record, check.field!));
const types = [...new Set(fieldValues.map(val => typeof val))];
// For stock symbols, check consistent format
if (check.field === 'symbol') {
const validSymbols = fieldValues.filter(symbol =>
typeof symbol === 'string' && /^[A-Z]{1,5}$/.test(symbol)
).length;
const score = fieldValues.length > 0 ? (validSymbols / fieldValues.length) * 100 : 100;
return {
checkName: check.name,
type: 'consistency',
field: check.field,
score,
passed: score >= check.threshold,
details: {
totalValues: fieldValues.length,
consistentValues: validSymbols,
inconsistentValues: fieldValues.length - validSymbols
}
};
}
// Generic consistency check
const score = types.length === 1 ? 100 : 0;
return {
checkName: check.name,
type: 'consistency',
field: check.field,
score,
passed: score >= check.threshold,
details: {
dataTypes: types,
isConsistent: types.length === 1
}
};
}
// Score = % of records whose field passes isValidValue()'s per-field rule.
private checkValidity(check: QualityCheck, data: any[]): any {
if (!check.field) {
throw new Error('Validity check requires a field');
}
let validRecords = 0;
const totalRecords = data.length;
for (const record of data) {
const value = this.getFieldValue(record, check.field);
if (this.isValidValue(check.field, value)) {
validRecords++;
}
}
const score = totalRecords > 0 ? (validRecords / totalRecords) * 100 : 100;
return {
checkName: check.name,
type: 'validity',
field: check.field,
score,
passed: score >= check.threshold,
details: {
totalRecords,
validRecords,
invalidRecords: totalRecords - validRecords
}
};
}
// Score = distinct values / total values, as a percentage (Set semantics,
// so uniqueness is by reference for object values).
private checkUniqueness(check: QualityCheck, data: any[]): any {
if (!check.field) {
throw new Error('Uniqueness check requires a field');
}
const fieldValues = data.map(record => this.getFieldValue(record, check.field!));
const uniqueValues = new Set(fieldValues);
const score = fieldValues.length > 0 ? (uniqueValues.size / fieldValues.length) * 100 : 100;
return {
checkName: check.name,
type: 'uniqueness',
field: check.field,
score,
passed: score >= check.threshold,
details: {
totalValues: fieldValues.length,
uniqueValues: uniqueValues.size,
duplicateValues: fieldValues.length - uniqueValues.size
}
};
}
// Dotted-path lookup ('a.b.c'); returns undefined if any segment is nullish.
private getFieldValue(record: any, fieldPath: string): any {
return fieldPath.split('.').reduce((obj, field) => obj?.[field], record);
}
// Per-field validity rules for market data; unknown fields are valid when
// merely non-nullish.
private isValidValue(field: string, value: any): boolean {
switch (field) {
case 'symbol':
return typeof value === 'string' && /^[A-Z]{1,5}$/.test(value);
case 'price':
return typeof value === 'number' && value > 0 && value < 1000000;
case 'volume':
return typeof value === 'number' && value >= 0 && Number.isInteger(value);
case 'timestamp':
return value instanceof Date || !isNaN(new Date(value).getTime());
default:
return value !== null && value !== undefined;
}
}
// Appends one run's metrics and evicts the oldest entry beyond 100
// (Map iteration order is insertion order, so the first key is the oldest).
private storeQualityMetrics(metrics: any): void {
const key = `metrics_${Date.now()}`;
this.qualityMetrics.set(key, metrics);
// Keep only last 100 metrics
if (this.qualityMetrics.size > 100) {
const oldestKey = this.qualityMetrics.keys().next().value;
if (oldestKey) {
this.qualityMetrics.delete(oldestKey);
}
}
}
// Aggregates the stored run history into totals/averages.
// NOTE(review): the `dataset` parameter is accepted but never used — all
// datasets share one metrics history. Confirm whether per-dataset filtering
// was intended.
async getQualityMetrics(dataset?: string): Promise<any> {
const allMetrics = Array.from(this.qualityMetrics.values());
if (allMetrics.length === 0) {
return {
totalChecks: 0,
averageScore: 0,
recentResults: []
};
}
const totalChecks = allMetrics.reduce((sum, m) => sum + m.checksRun, 0);
const averageScore = allMetrics.reduce((sum, m) => sum + m.averageScore, 0) / allMetrics.length;
const recentResults = allMetrics.slice(-10);
return {
totalChecks,
averageScore: Math.round(averageScore * 100) / 100,
recentResults,
summary: {
totalRuns: allMetrics.length,
averageProcessingTime: allMetrics.reduce((sum, m) => sum + m.processingTimeMs, 0) / allMetrics.length
}
};
}
// Builds (and caches by dataset name) a report with recommendations and a
// simple two-point trend over recent runs.
async generateReport(dataset: string): Promise<any> {
const metrics = await this.getQualityMetrics(dataset);
const report = {
dataset,
generatedAt: new Date(),
summary: metrics,
recommendations: this.generateRecommendations(metrics),
trends: this.analyzeTrends(metrics.recentResults)
};
this.qualityReports.set(dataset, report);
return report;
}
// Threshold-based advice: <80% poor, 80-95% improvable, no runs → enable
// monitoring.
private generateRecommendations(metrics: any): string[] {
const recommendations: string[] = [];
if (metrics.averageScore < 80) {
recommendations.push('Overall data quality is below acceptable threshold. Review data ingestion processes.');
}
if (metrics.averageScore < 95 && metrics.averageScore >= 80) {
recommendations.push('Data quality is acceptable but could be improved. Consider implementing additional validation rules.');
}
if (metrics.totalChecks === 0) {
recommendations.push('No quality checks have been run. Implement quality monitoring for your data pipelines.');
}
return recommendations;
}
// Compares only the last two run scores to label the trend.
private analyzeTrends(recentResults: any[]): any {
if (recentResults.length < 2) {
return { trend: 'insufficient_data', message: 'Not enough data to analyze trends' };
}
const scores = recentResults.map(r => r.averageScore);
const latestScore = scores[scores.length - 1];
const previousScore = scores[scores.length - 2];
if (latestScore > previousScore) {
return { trend: 'improving', message: 'Data quality is improving' };
} else if (latestScore < previousScore) {
return { trend: 'declining', message: 'Data quality is declining' };
} else {
return { trend: 'stable', message: 'Data quality is stable' };
}
}
// Dataset names that have a cached report.
async getAvailableReports(): Promise<string[]> {
return Array.from(this.qualityReports.keys());
}
// Cached report for a dataset, or null if none was generated.
async getReport(dataset: string): Promise<any | null> {
return this.qualityReports.get(dataset) || null;
}
}

View file

@ -1,293 +0,0 @@
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('DataTransformationService');
import { TransformationStep, ProcessingResult } from '../types/DataPipeline';
/**
 * Applies transformations to record batches. Supports inline JavaScript
 * snippets (executed per record via the Function constructor), named custom
 * transformation functions (built-ins registered at init plus any added via
 * registerCustomTransformation), and a not-yet-implemented SQL path.
 */
export class DataTransformationService {
// Named transformation functions available to the 'custom' type;
// populated with built-ins by initialize().
private transformationFunctions: Map<string, Function> = new Map();
async initialize(): Promise<void> {
logger.info('🔄 Initializing Data Transformation Service...');
// Register built-in transformation functions
this.registerBuiltInTransformations();
logger.info('✅ Data Transformation Service initialized');
}
/**
 * Dispatches to the transformation implementation selected by step.type.
 * Never rejects — failures become a result with one TRANSFORMATION_ERROR.
 */
async transformData(step: TransformationStep, parameters: Record<string, any>): Promise<ProcessingResult> {
const startTime = Date.now();
logger.info(`🔄 Starting data transformation: ${step.type}`);
try {
switch (step.type) {
case 'javascript':
return await this.executeJavaScriptTransformation(step, parameters);
case 'sql':
return await this.executeSqlTransformation(step, parameters);
case 'custom':
return await this.executeCustomTransformation(step, parameters);
default:
throw new Error(`Unsupported transformation type: ${step.type}`);
}
} catch (error) {
const processingTime = Date.now() - startTime;
logger.error(`❌ Data transformation failed after ${processingTime}ms:`, error);
return {
recordsProcessed: 0,
recordsSuccessful: 0,
recordsFailed: 0,
errors: [{
record: 0,
message: error instanceof Error ? error.message : 'Unknown error',
code: 'TRANSFORMATION_ERROR'
}],
metadata: { processingTimeMs: processingTime }
};
}
}
// Runs the step's JS snippet once per input record. The snippet sees
// { record, parameters, utils }; returning undefined keeps the record
// unchanged. Per-record throws are collected as JS_TRANSFORM_ERROR entries
// rather than aborting the batch.
// SECURITY NOTE: the snippet is executed via the Function constructor with
// full process privileges — this is arbitrary code execution. Only accept
// pipeline configurations from trusted operators, or move execution into a
// proper sandbox.
private async executeJavaScriptTransformation(step: TransformationStep, parameters: Record<string, any>): Promise<ProcessingResult> {
const { code, inputData } = step.configuration;
if (!code || !inputData) {
throw new Error('JavaScript transformation requires code and inputData configuration');
}
const transformedRecords: any[] = [];
const errors: any[] = [];
let recordCount = 0;
// Execute transformation for each record
for (const record of inputData) {
recordCount++;
try {
// Create a safe execution context
const context = {
record,
parameters,
utils: this.getTransformationUtils(),
};
// Execute the transformation code
const transformFunction = new Function('context', `
const { record, parameters, utils } = context;
${code}
`);
const result = transformFunction(context);
if (result !== undefined) {
transformedRecords.push(result);
} else {
transformedRecords.push(record); // Keep original if no transformation result
}
} catch (error) {
errors.push({
record: recordCount,
message: error instanceof Error ? error.message : 'Transformation error',
code: 'JS_TRANSFORM_ERROR'
});
}
}
logger.info(`🔄 Transformed ${transformedRecords.length} records using JavaScript`);
return {
recordsProcessed: recordCount,
recordsSuccessful: transformedRecords.length,
recordsFailed: errors.length,
errors,
metadata: {
transformationType: 'javascript',
outputData: transformedRecords
}
};
}
private async executeSqlTransformation(step: TransformationStep, parameters: Record<string, any>): Promise<ProcessingResult> {
// Placeholder for SQL transformation
// In a real implementation, this would execute SQL queries against a data warehouse
// or in-memory SQL engine like DuckDB
throw new Error('SQL transformation not yet implemented');
}
// Looks up a registered function by configuration.functionName and invokes
// it with (inputData, parameters); the function is expected to return a
// complete ProcessingResult itself.
private async executeCustomTransformation(step: TransformationStep, parameters: Record<string, any>): Promise<ProcessingResult> {
const { functionName, inputData } = step.configuration;
if (!functionName) {
throw new Error('Custom transformation requires functionName configuration');
}
const transformFunction = this.transformationFunctions.get(functionName);
if (!transformFunction) {
throw new Error(`Custom transformation function not found: ${functionName}`);
}
const result = await transformFunction(inputData, parameters);
logger.info(`🔄 Executed custom transformation: ${functionName}`);
return result;
}
// Installs the three built-in named transformations:
//   normalizeMarketData   — coerce symbol/price/volume/timestamp fields
//   aggregateFinancialData — group by a key and compute avg/sum/count
//   cleanData             — strip nulls, trim strings, coerce numerics
private registerBuiltInTransformations(): void {
// Market data normalization: uppercases symbols, numifies price/volume
// (falling back to 0 on unparsable input), defaults timestamp to "now".
this.transformationFunctions.set('normalizeMarketData', (data: any[], parameters: any) => {
const normalized = data.map(record => ({
symbol: record.symbol?.toUpperCase(),
price: parseFloat(record.price) || 0,
volume: parseInt(record.volume) || 0,
timestamp: new Date(record.timestamp || Date.now()),
source: parameters.source || 'unknown'
}));
return {
recordsProcessed: data.length,
recordsSuccessful: normalized.length,
recordsFailed: 0,
errors: [],
metadata: { outputData: normalized }
};
});
// Financial data aggregation: groups records by `groupBy` (default
// 'symbol') and emits avgPrice / totalVolume / count per group depending
// on the requested `aggregations`.
this.transformationFunctions.set('aggregateFinancialData', (data: any[], parameters: any) => {
const { groupBy = 'symbol', aggregations = ['avg', 'sum'] } = parameters;
const grouped = data.reduce((acc, record) => {
const key = record[groupBy];
if (!acc[key]) {
acc[key] = [];
}
acc[key].push(record);
return acc;
}, {} as Record<string, any[]>);
const aggregated = Object.entries(grouped).map(([key, records]) => {
const recordsArray = records as any[];
const result: any = { [groupBy]: key };
if (aggregations.includes('avg')) {
result.avgPrice = recordsArray.reduce((sum: number, r: any) => sum + (r.price || 0), 0) / recordsArray.length;
}
if (aggregations.includes('sum')) {
result.totalVolume = recordsArray.reduce((sum: number, r: any) => sum + (r.volume || 0), 0);
}
if (aggregations.includes('count')) {
result.count = recordsArray.length;
}
return result;
});
return {
recordsProcessed: data.length,
recordsSuccessful: aggregated.length,
recordsFailed: 0,
errors: [],
metadata: { outputData: aggregated }
};
});
// Data cleaning: each step is individually switchable via parameters;
// numeric coercion only applies to strings that parse cleanly as numbers.
this.transformationFunctions.set('cleanData', (data: any[], parameters: any) => {
const { removeNulls = true, trimStrings = true, validateNumbers = true } = parameters;
const cleaned: any[] = [];
const errors: any[] = [];
data.forEach((record, index) => {
try {
let cleanRecord = { ...record };
if (removeNulls) {
Object.keys(cleanRecord).forEach(key => {
if (cleanRecord[key] === null || cleanRecord[key] === undefined) {
delete cleanRecord[key];
}
});
}
if (trimStrings) {
Object.keys(cleanRecord).forEach(key => {
if (typeof cleanRecord[key] === 'string') {
cleanRecord[key] = cleanRecord[key].trim();
}
});
}
if (validateNumbers) {
Object.keys(cleanRecord).forEach(key => {
if (typeof cleanRecord[key] === 'string' && !isNaN(Number(cleanRecord[key]))) {
cleanRecord[key] = Number(cleanRecord[key]);
}
});
}
cleaned.push(cleanRecord);
} catch (error) {
errors.push({
record: index + 1,
message: error instanceof Error ? error.message : 'Cleaning error',
code: 'DATA_CLEANING_ERROR'
});
}
});
return {
recordsProcessed: data.length,
recordsSuccessful: cleaned.length,
recordsFailed: errors.length,
errors,
metadata: { outputData: cleaned }
};
});
}
// Helper toolbox exposed to inline JS snippets as `utils`.
private getTransformationUtils() {
return {
// Date utilities
formatDate: (date: Date | string, format: string = 'ISO') => {
const d = new Date(date);
switch (format) {
case 'ISO':
return d.toISOString();
case 'YYYY-MM-DD':
return d.toISOString().split('T')[0];
default:
return d.toString();
}
},
// Number utilities
round: (num: number, decimals: number = 2) => {
return Math.round(num * Math.pow(10, decimals)) / Math.pow(10, decimals);
},
// String utilities
slugify: (str: string) => {
return str.toLowerCase().replace(/[^a-z0-9]/g, '-').replace(/-+/g, '-');
},
// Market data utilities
calculatePercentageChange: (current: number, previous: number) => {
if (previous === 0) return 0;
return ((current - previous) / previous) * 100;
}
};
}
// Adds (or replaces) a named transformation usable by the 'custom' type.
registerCustomTransformation(name: string, func: Function): void {
this.transformationFunctions.set(name, func);
logger.info(`✅ Registered custom transformation: ${name}`);
}
// Names of all registered transformations (built-in plus custom).
getAvailableTransformations(): string[] {
return Array.from(this.transformationFunctions.keys());
}
}

View file

@ -1,305 +0,0 @@
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('DataValidationService');
import { ValidationStep, ProcessingResult, ValidationRule } from '../types/DataPipeline';
import Joi from 'joi';
export class DataValidationService {
private validators: Map<string, Function> = new Map();
/** Installs the stock validators so they are available before the first validateData() call. */
async initialize(): Promise<void> {
  logger.info('🔄 Initializing Data Validation Service...');
  this.registerBuiltInValidators();
  logger.info('✅ Data Validation Service initialized');
}
/**
 * Applies every rule in the step to each record of parameters.inputData.
 * A record with zero rule failures is kept in the valid set; otherwise all
 * of its failure entries are appended to `errors`, and — when the step's
 * onFailure strategy is 'stop' — processing halts at that record (records
 * after it are neither processed nor counted). Per-rule throws are recorded
 * as VALIDATION_ERROR entries rather than aborting the record's other rules.
 * Never rejects: unexpected failures become a VALIDATION_SERVICE_ERROR
 * result. The surviving records are returned in metadata.outputData.
 */
async validateData(step: ValidationStep, parameters: Record<string, any>): Promise<ProcessingResult> {
const startTime = Date.now();
logger.info(`✅ Starting data validation with ${step.rules.length} rules`);
const inputData = parameters.inputData || [];
const validRecords: any[] = [];
const errors: any[] = [];
let recordCount = 0;
try {
for (const record of inputData) {
recordCount++;
const recordErrors: any[] = [];
// Apply all validation rules to this record
for (const rule of step.rules) {
try {
const isValid = await this.applyValidationRule(record, rule);
if (!isValid) {
recordErrors.push({
record: recordCount,
field: rule.field,
message: rule.message || `Validation failed for rule: ${rule.type}`,
code: `VALIDATION_${rule.type.toUpperCase()}_FAILED`
});
}
} catch (error) {
// A rule that throws (bad pattern, missing custom validator, ...)
// is reported against this record but does not stop its other rules.
recordErrors.push({
record: recordCount,
field: rule.field,
message: error instanceof Error ? error.message : 'Validation error',
code: 'VALIDATION_ERROR'
});
}
}
if (recordErrors.length === 0) {
validRecords.push(record);
} else {
errors.push(...recordErrors);
// Handle validation failure based on strategy
if (step.onFailure === 'stop') {
break;
}
}
}
const processingTime = Date.now() - startTime;
logger.info(`✅ Validation completed: ${validRecords.length}/${recordCount} records valid in ${processingTime}ms`);
return {
recordsProcessed: recordCount,
recordsSuccessful: validRecords.length,
recordsFailed: recordCount - validRecords.length,
errors,
metadata: {
validationRules: step.rules.length,
onFailure: step.onFailure,
processingTimeMs: processingTime,
outputData: validRecords
}
};
} catch (error) {
const processingTime = Date.now() - startTime;
logger.error(`❌ Data validation failed after ${processingTime}ms:`, error);
return {
recordsProcessed: recordCount,
recordsSuccessful: 0,
recordsFailed: recordCount,
errors: [{
record: 0,
message: error instanceof Error ? error.message : 'Unknown validation error',
code: 'VALIDATION_SERVICE_ERROR'
}],
metadata: { processingTimeMs: processingTime }
};
}
}
/**
 * Evaluates one rule against one record, dispatching on the rule type.
 * 'custom' rules receive the whole record; all other rule types operate on
 * the value extracted from rule.field. Throws for unknown rule types.
 */
private async applyValidationRule(record: any, rule: ValidationRule): Promise<boolean> {
  const value = this.getFieldValue(record, rule.field);
  switch (rule.type) {
    case 'required':
      return this.validateRequired(value);
    case 'type':
      return this.validateType(value, rule.value);
    case 'range':
      return this.validateRange(value, rule.value);
    case 'pattern':
      return this.validatePattern(value, rule.value);
    case 'custom':
      return await this.validateCustom(record, rule);
    default:
      throw new Error(`Unknown validation rule type: ${rule.type}`);
  }
}
/** Dotted-path lookup ('a.b.c'); yields undefined if any segment is nullish. */
private getFieldValue(record: any, fieldPath: string): any {
  let current = record;
  for (const segment of fieldPath.split('.')) {
    current = current?.[segment];
  }
  return current;
}
/** A value is "present" unless it is null, undefined, or the empty string. */
private validateRequired(value: any): boolean {
  return !(value === null || value === undefined || value === '');
}
/**
 * Checks a value against one of the supported type names. Nullish values
 * never match, and an unrecognized type name always fails. 'date' accepts
 * either a Date instance or anything Date.parse can interpret.
 */
private validateType(value: any, expectedType: string): boolean {
  if (value === null || value === undefined) {
    return false;
  }
  switch (expectedType) {
    case 'string':
      return typeof value === 'string';
    case 'number':
      return typeof value === 'number' && !isNaN(value);
    case 'boolean':
      return typeof value === 'boolean';
    case 'date': {
      if (value instanceof Date) {
        return true;
      }
      return !isNaN(Date.parse(value));
    }
    case 'array':
      return Array.isArray(value);
    case 'object':
      return typeof value === 'object' && !Array.isArray(value);
    default:
      return false;
  }
}
/**
 * Numeric bounds check; min/max are each optional and inclusive.
 * Non-numbers always fail.
 */
private validateRange(value: any, range: { min?: number; max?: number }): boolean {
  if (typeof value !== 'number') {
    return false;
  }
  const aboveMin = range.min === undefined || value >= range.min;
  const belowMax = range.max === undefined || value <= range.max;
  return aboveMin && belowMax;
}
/**
 * Regex test against a string value; non-strings always fail.
 * An invalid pattern makes the RegExp constructor throw, which the caller
 * records as a VALIDATION_ERROR for that record.
 */
private validatePattern(value: any, pattern: string): boolean {
  return typeof value === 'string' && new RegExp(pattern).test(value);
}
/**
 * Runs a registered custom validator named by rule.value, passing it the
 * whole record plus the field path. Throws if no validator by that name
 * has been registered.
 */
private async validateCustom(record: any, rule: ValidationRule): Promise<boolean> {
  const validatorName = rule.value as string;
  const validatorFn = this.validators.get(validatorName);
  if (validatorFn === undefined) {
    throw new Error(`Custom validator not found: ${validatorName}`);
  }
  return await validatorFn(record, rule.field);
}
// Installs the stock custom validators available to 'custom' rules:
//   stockSymbol, stockPrice, stockVolume, marketTimestamp, email,
//   jsonSchema, completeness.
private registerBuiltInValidators(): void {
// Stock symbol validator: 1-5 uppercase ASCII letters.
this.validators.set('stockSymbol', (record: any, field: string) => {
const symbol = this.getFieldValue(record, field);
if (typeof symbol !== 'string') return false;
// Basic stock symbol validation: 1-5 uppercase letters
return /^[A-Z]{1,5}$/.test(symbol);
});
// Price validator: positive number below 1,000,000.
this.validators.set('stockPrice', (record: any, field: string) => {
const price = this.getFieldValue(record, field);
// Must be a positive number
return typeof price === 'number' && price > 0 && price < 1000000;
});
// Volume validator: non-negative integer.
this.validators.set('stockVolume', (record: any, field: string) => {
const volume = this.getFieldValue(record, field);
// Must be a non-negative integer
return Number.isInteger(volume) && volume >= 0;
});
// Market data timestamp validator: parseable date no older than ~1 year
// and at most 1 hour in the future (tolerates minor clock skew).
this.validators.set('marketTimestamp', (record: any, field: string) => {
const timestamp = this.getFieldValue(record, field);
if (!timestamp) return false;
const date = new Date(timestamp);
if (isNaN(date.getTime())) return false;
// Check if timestamp is within reasonable bounds (not too old or in future)
const now = new Date();
const oneYearAgo = new Date(now.getTime() - 365 * 24 * 60 * 60 * 1000);
const oneHourInFuture = new Date(now.getTime() + 60 * 60 * 1000);
return date >= oneYearAgo && date <= oneHourInFuture;
});
// Email validator: simple non-whitespace local@domain.tld shape check.
this.validators.set('email', (record: any, field: string) => {
const email = this.getFieldValue(record, field);
if (typeof email !== 'string') return false;
const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/;
return emailRegex.test(email);
});
// JSON schema validator: validates the whole record against a Joi object
// schema. NOTE(review): validateCustom() invokes validators with only
// (record, field), so the `schema` argument is never supplied and this
// validator currently always returns false — confirm intended call shape.
this.validators.set('jsonSchema', (record: any, field: string, schema?: any) => {
if (!schema) return false;
try {
const joiSchema = Joi.object(schema);
const { error } = joiSchema.validate(record);
return !error;
} catch {
return false;
}
});
// Data completeness validator: record must carry non-empty symbol, price,
// and timestamp fields (the `field` argument is ignored).
this.validators.set('completeness', (record: any, field: string) => {
const requiredFields = ['symbol', 'price', 'timestamp'];
return requiredFields.every(f => {
const value = this.getFieldValue(record, f);
return value !== null && value !== undefined && value !== '';
});
});
}
/**
 * Registers (or replaces) a named validator so 'custom' validation rules
 * can reference it by name.
 */
registerCustomValidator(name: string, validator: Function): void {
  this.validators.set(name, validator);
  logger.info(`✅ Registered custom validator: ${name}`);
}
/** Names of every validator currently registered (built-in and custom). */
getAvailableValidators(): string[] {
  return [...this.validators.keys()];
}
/**
 * Validates an array of records against a Joi object schema in one pass.
 * On failure the whole batch is reported failed (Joi aborts at the first
 * violation by default) and the failing record's index is surfaced from
 * the Joi error path instead of being hard-coded to 0.
 *
 * @param data   Records to validate.
 * @param schema Joi schema definition applied to each record.
 * @returns ProcessingResult; on success the (possibly coerced) records are
 *          returned in metadata.outputData.
 */
async validateSchema(data: any[], schema: any): Promise<ProcessingResult> {
  const joiSchema = Joi.array().items(Joi.object(schema));
  const { error, value } = joiSchema.validate(data);
  if (error) {
    // For array schemas Joi's detail path begins with the index of the
    // offending element; fall back to 0 when the path has no index.
    const firstDetail = error.details && error.details[0];
    const failingRecord =
      firstDetail && typeof firstDetail.path[0] === 'number'
        ? (firstDetail.path[0] as number)
        : 0;
    return {
      recordsProcessed: data.length,
      recordsSuccessful: 0,
      recordsFailed: data.length,
      errors: [{
        record: failingRecord,
        message: error.message,
        code: 'SCHEMA_VALIDATION_FAILED'
      }],
      metadata: { schemaValidation: true }
    };
  }
  return {
    recordsProcessed: data.length,
    recordsSuccessful: data.length,
    recordsFailed: 0,
    errors: [],
    metadata: {
      schemaValidation: true,
      outputData: value
    }
  };
}
}

View file

@ -1,178 +0,0 @@
// Data Pipeline Types

/** A configured data pipeline: its steps, lifecycle status and optional schedule. */
export interface DataPipeline {
  id: string;
  name: string;
  description?: string;
  status: PipelineStatus;
  steps: PipelineSteps;
  schedule?: PipelineSchedule;
  // Free-form pipeline-level metadata.
  metadata: Record<string, any>;
  createdAt: Date;
  updatedAt: Date;
}

/** Lifecycle state of a pipeline definition. */
export enum PipelineStatus {
  DRAFT = 'draft',
  ACTIVE = 'active',
  PAUSED = 'paused',
  DISABLED = 'disabled',
}

/** The optional stages a pipeline may include; each stage is independent. */
export interface PipelineSteps {
  ingestion?: IngestionStep;
  transformation?: TransformationStep;
  validation?: ValidationStep;
  qualityChecks?: QualityCheckStep;
}

/** How raw data enters the pipeline, plus retry behavior on failure. */
export interface IngestionStep {
  type: 'api' | 'file' | 'database' | 'stream';
  source: DataSource;
  configuration: Record<string, any>;
  retryPolicy?: RetryPolicy;
}

/** How ingested data is transformed; `schema` describes the expected output shape. */
export interface TransformationStep {
  type: 'sql' | 'javascript' | 'python' | 'custom';
  configuration: Record<string, any>;
  schema?: DataSchema;
}

/** Validation rules and what to do when a record fails them. */
export interface ValidationStep {
  rules: ValidationRule[];
  onFailure: 'stop' | 'continue' | 'alert';
}

/** Data-quality checks evaluated against error/warning thresholds. */
export interface QualityCheckStep {
  checks: QualityCheck[];
  thresholds: QualityThresholds;
}

/** Cron-based schedule; lastRun/nextRun are null until the first run is planned. */
export interface PipelineSchedule {
  cronExpression: string;
  enabled: boolean;
  lastRun: Date | null;
  nextRun: Date | null;
}
// Job Types

/** One execution of a pipeline. */
export interface PipelineJob {
  id: string;
  pipelineId: string;
  status: JobStatus;
  // Parameters supplied for this particular run.
  parameters: Record<string, any>;
  createdAt: Date;
  // null until the job actually starts running.
  startedAt: Date | null;
  // null until the job reaches a terminal state.
  completedAt: Date | null;
  // Failure description; null when the job has not failed.
  error: string | null;
  metrics: JobMetrics;
}

/** Lifecycle state of a pipeline job. */
export enum JobStatus {
  PENDING = 'pending',
  RUNNING = 'running',
  COMPLETED = 'completed',
  FAILED = 'failed',
  CANCELLED = 'cancelled',
}

/** Aggregate counters for a single job run. */
export interface JobMetrics {
  recordsProcessed: number;
  recordsSuccessful: number;
  recordsFailed: number;
  processingTimeMs: number;
}
// Data Source Types

/** Where data is read from and, for file-like sources, its serialization format. */
export interface DataSource {
  type: 'api' | 'file' | 'database' | 'stream';
  connection: ConnectionConfig;
  format?: 'json' | 'csv' | 'xml' | 'parquet' | 'avro';
}

/**
 * Connection parameters; which fields apply depends on the source type
 * (e.g. url/apiKey/headers for APIs, host/port/database for databases).
 */
export interface ConnectionConfig {
  url?: string;
  host?: string;
  port?: number;
  database?: string;
  username?: string;
  password?: string;
  apiKey?: string;
  headers?: Record<string, string>;
  params?: Record<string, any>;
}

// Schema Types

/** Structural description of a dataset: its fields plus optional constraints. */
export interface DataSchema {
  fields: SchemaField[];
  constraints?: SchemaConstraint[];
}

/** One column/field of a schema. */
export interface SchemaField {
  name: string;
  type: 'string' | 'number' | 'boolean' | 'date' | 'object' | 'array';
  required: boolean;
  nullable: boolean;
  // Optional refinement of `type` (e.g. a date or number format string).
  format?: string;
  description?: string;
}

/** A cross-field or per-field constraint applied on top of the field types. */
export interface SchemaConstraint {
  type: 'unique' | 'reference' | 'range' | 'pattern';
  field: string;
  value: any;
}

// Validation Types

/**
 * A single validation rule. The meaning of `value` depends on `type`
 * (e.g. a {min,max} object for 'range', a regex source for 'pattern',
 * a registered validator name for 'custom').
 */
export interface ValidationRule {
  field: string;
  type: 'required' | 'type' | 'range' | 'pattern' | 'custom';
  value: any;
  message?: string;
}

// Quality Check Types

/** A named data-quality check with its pass threshold. */
export interface QualityCheck {
  name: string;
  type: 'completeness' | 'accuracy' | 'consistency' | 'validity' | 'uniqueness';
  field?: string;
  condition: string;
  threshold: number;
}

/** Percentage cut-offs that classify a quality result as error or warning. */
export interface QualityThresholds {
  error: number; // 0-100 percentage
  warning: number; // 0-100 percentage
}

// Processing Result Types

/** Outcome of processing a batch of records. */
export interface ProcessingResult {
  recordsProcessed: number;
  recordsSuccessful: number;
  recordsFailed: number;
  errors: ProcessingError[];
  metadata: Record<string, any>;
}

/** One per-record failure; `record` is the index within the processed batch. */
export interface ProcessingError {
  record: number;
  field?: string;
  message: string;
  code?: string;
}

// Retry Policy Types

/** Retry behavior for a failing step; delays are interpreted per the strategy. */
export interface RetryPolicy {
  maxAttempts: number;
  backoffStrategy: 'fixed' | 'exponential' | 'linear';
  initialDelay: number;
  maxDelay: number;
}

View file

@ -1,23 +0,0 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"module": "ESNext",
"moduleResolution": "bundler",
"noEmit": true,
"allowImportingTsExtensions": true,
"declarationMap": true,
"types": ["bun-types"]
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"], "references": [
{ "path": "../../../libs/api-client" },
{ "path": "../../../libs/config" },
{ "path": "../../../libs/event-bus" },
{ "path": "../../../libs/http-client" },
{ "path": "../../../libs/logger" },
{ "path": "../../../libs/types" },
{ "path": "../../../libs/utils" },
]
}

View file

@ -1,41 +0,0 @@
{
"name": "feature-store",
"version": "1.0.0",
"description": "ML feature management and serving service",
"main": "src/index.ts",
"scripts": {
"dev": "bun run --watch src/index.ts",
"start": "bun run src/index.ts",
"build": "bun build src/index.ts --outdir=dist",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit"
},
"dependencies": {
"@stock-bot/types": "*",
"@stock-bot/event-bus": "*",
"@stock-bot/utils": "*",
"@stock-bot/api-client": "*",
"hono": "^4.6.3",
"ioredis": "^5.4.1",
"node-fetch": "^3.3.2",
"date-fns": "^2.30.0",
"lodash": "^4.17.21",
"compression": "^1.7.4",
"cors": "^2.8.5",
"helmet": "^7.1.0"
},
"devDependencies": {
"@types/bun": "latest",
"@types/lodash": "^4.14.200",
"@types/compression": "^1.7.5",
"@types/cors": "^2.8.17",
"typescript": "^5.3.0",
"eslint": "^8.55.0",
"@typescript-eslint/eslint-plugin": "^6.13.1",
"@typescript-eslint/parser": "^6.13.1"
},
"peerDependencies": {
"typescript": "^5.0.0"
}
}

View file

@ -1,220 +0,0 @@
import { Context } from 'hono';
import { FeatureComputationService } from '../services/FeatureComputationService';
import { Logger } from '@stock-bot/utils';
import {
ComputationJob,
CreateComputationJobRequest,
UpdateComputationJobRequest
} from '../types/FeatureStore';
/**
 * HTTP controller for feature computation jobs: CRUD, execution,
 * scheduling and run history. Every handler responds with the envelope
 * { success: boolean, data? | message?, error? }; failures are logged and
 * rendered as HTTP 500, missing resources as 404.
 */
export class ComputationController {
  constructor(
    private computationService: FeatureComputationService,
    private logger: Logger
  ) {}

  /** Logs `message` with the error and renders the shared 500 envelope. */
  private fail(c: Context, message: string, error: unknown) {
    this.logger.error(message, { error });
    return c.json({
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error'
    }, 500);
  }

  /** Renders the shared 404 envelope for a missing resource. */
  private notFound(c: Context, message: string) {
    return c.json({ success: false, error: message }, 404);
  }

  /** POST — creates a computation job from the JSON request body. */
  async createComputationJob(c: Context) {
    try {
      const request: CreateComputationJobRequest = await c.req.json();
      const job = await this.computationService.createComputationJob(request);
      this.logger.info('Computation job created', { jobId: job.id });
      return c.json({ success: true, data: job }, 201);
    } catch (error) {
      return this.fail(c, 'Failed to create computation job', error);
    }
  }

  /** GET — fetches one job by its `id` path parameter. */
  async getComputationJob(c: Context) {
    try {
      const jobId = c.req.param('id');
      const job = await this.computationService.getComputationJob(jobId);
      if (!job) {
        return this.notFound(c, 'Computation job not found');
      }
      return c.json({ success: true, data: job });
    } catch (error) {
      return this.fail(c, 'Failed to get computation job', error);
    }
  }

  /** PUT/PATCH — applies the request body to an existing job. */
  async updateComputationJob(c: Context) {
    try {
      const jobId = c.req.param('id');
      const request: UpdateComputationJobRequest = await c.req.json();
      const job = await this.computationService.updateComputationJob(jobId, request);
      if (!job) {
        return this.notFound(c, 'Computation job not found');
      }
      this.logger.info('Computation job updated', { jobId });
      return c.json({ success: true, data: job });
    } catch (error) {
      return this.fail(c, 'Failed to update computation job', error);
    }
  }

  /** DELETE — removes a job by id. */
  async deleteComputationJob(c: Context) {
    try {
      const jobId = c.req.param('id');
      await this.computationService.deleteComputationJob(jobId);
      this.logger.info('Computation job deleted', { jobId });
      return c.json({ success: true, message: 'Computation job deleted successfully' });
    } catch (error) {
      return this.fail(c, 'Failed to delete computation job', error);
    }
  }

  /** GET — lists jobs, optionally filtered by featureGroupId and status. */
  async listComputationJobs(c: Context) {
    try {
      const featureGroupId = c.req.query('featureGroupId');
      const status = c.req.query('status');
      const jobs = await this.computationService.listComputationJobs({
        featureGroupId,
        // NOTE(review): `status` arrives as a raw query string; the cast
        // assumes it matches the service's status enum — validate upstream
        // or in the service. Kept narrow and commented rather than widened.
        status: status as any
      });
      return c.json({ success: true, data: jobs });
    } catch (error) {
      return this.fail(c, 'Failed to list computation jobs', error);
    }
  }

  /** POST — triggers execution of a job and returns the execution result. */
  async executeComputationJob(c: Context) {
    try {
      const jobId = c.req.param('id');
      const result = await this.computationService.executeComputationJob(jobId);
      this.logger.info('Computation job executed', { jobId, result });
      return c.json({ success: true, data: result });
    } catch (error) {
      return this.fail(c, 'Failed to execute computation job', error);
    }
  }

  /** POST — attaches a schedule (from the body's `schedule` field) to a job. */
  async scheduleComputationJob(c: Context) {
    try {
      const jobId = c.req.param('id');
      const { schedule } = await c.req.json();
      await this.computationService.scheduleComputationJob(jobId, schedule);
      this.logger.info('Computation job scheduled', { jobId, schedule });
      return c.json({ success: true, message: 'Computation job scheduled successfully' });
    } catch (error) {
      return this.fail(c, 'Failed to schedule computation job', error);
    }
  }

  /** POST — removes any schedule from a job. */
  async unscheduleComputationJob(c: Context) {
    try {
      const jobId = c.req.param('id');
      await this.computationService.unscheduleComputationJob(jobId);
      this.logger.info('Computation job unscheduled', { jobId });
      return c.json({ success: true, message: 'Computation job unscheduled successfully' });
    } catch (error) {
      return this.fail(c, 'Failed to unschedule computation job', error);
    }
  }

  /** GET — paginated run history for a job (limit defaults 10, offset 0). */
  async getComputationJobHistory(c: Context) {
    try {
      const jobId = c.req.param('id');
      // Explicit radix 10 so values like "08" parse as expected.
      const limit = parseInt(c.req.query('limit') || '10', 10);
      const offset = parseInt(c.req.query('offset') || '0', 10);
      const history = await this.computationService.getComputationJobHistory(jobId, limit, offset);
      return c.json({ success: true, data: history });
    } catch (error) {
      return this.fail(c, 'Failed to get computation job history', error);
    }
  }
}

View file

@ -1,226 +0,0 @@
import { Context } from 'hono';
import { FeatureStoreService } from '../services/FeatureStoreService';
import { Logger } from '@stock-bot/utils';
import {
FeatureGroup,
CreateFeatureGroupRequest,
UpdateFeatureGroupRequest,
FeatureValue,
GetFeaturesRequest
} from '../types/FeatureStore';
/**
 * HTTP controller for feature groups and their feature values: CRUD on
 * groups, point reads/writes of features, and historical lookups. All
 * handlers respond with the envelope { success, data? | message?, error? };
 * failures are logged and rendered as HTTP 500, missing resources as 404,
 * invalid client input as 400.
 */
export class FeatureController {
  constructor(
    private featureStoreService: FeatureStoreService,
    private logger: Logger
  ) {}

  /** Logs `message` with the error and renders the shared 500 envelope. */
  private fail(c: Context, message: string, error: unknown) {
    this.logger.error(message, { error });
    return c.json({
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error'
    }, 500);
  }

  /** Renders the shared 404 envelope. */
  private notFound(c: Context, message: string) {
    return c.json({ success: false, error: message }, 404);
  }

  /** Renders the shared 400 envelope for invalid client input. */
  private badRequest(c: Context, message: string) {
    return c.json({ success: false, error: message }, 400);
  }

  /** POST — creates a feature group from the JSON request body. */
  async createFeatureGroup(c: Context) {
    try {
      const request: CreateFeatureGroupRequest = await c.req.json();
      const featureGroup = await this.featureStoreService.createFeatureGroup(request);
      this.logger.info('Feature group created', { featureGroupId: featureGroup.id });
      return c.json({ success: true, data: featureGroup }, 201);
    } catch (error) {
      return this.fail(c, 'Failed to create feature group', error);
    }
  }

  /** GET — fetches one feature group by its `id` path parameter. */
  async getFeatureGroup(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const featureGroup = await this.featureStoreService.getFeatureGroup(featureGroupId);
      if (!featureGroup) {
        return this.notFound(c, 'Feature group not found');
      }
      return c.json({ success: true, data: featureGroup });
    } catch (error) {
      return this.fail(c, 'Failed to get feature group', error);
    }
  }

  /** PUT/PATCH — applies the request body to an existing feature group. */
  async updateFeatureGroup(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const request: UpdateFeatureGroupRequest = await c.req.json();
      const featureGroup = await this.featureStoreService.updateFeatureGroup(featureGroupId, request);
      if (!featureGroup) {
        return this.notFound(c, 'Feature group not found');
      }
      this.logger.info('Feature group updated', { featureGroupId });
      return c.json({ success: true, data: featureGroup });
    } catch (error) {
      return this.fail(c, 'Failed to update feature group', error);
    }
  }

  /** DELETE — removes a feature group by id. */
  async deleteFeatureGroup(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      await this.featureStoreService.deleteFeatureGroup(featureGroupId);
      this.logger.info('Feature group deleted', { featureGroupId });
      return c.json({ success: true, message: 'Feature group deleted successfully' });
    } catch (error) {
      return this.fail(c, 'Failed to delete feature group', error);
    }
  }

  /** GET — lists all feature groups. */
  async listFeatureGroups(c: Context) {
    try {
      const featureGroups = await this.featureStoreService.listFeatureGroups();
      return c.json({ success: true, data: featureGroups });
    } catch (error) {
      return this.fail(c, 'Failed to list feature groups', error);
    }
  }

  /** GET — feature values for one entity; `entityId` query param required. */
  async getFeatures(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const entityId = c.req.query('entityId');
      const timestamp = c.req.query('timestamp');
      if (!entityId) {
        return this.badRequest(c, 'entityId query parameter is required');
      }
      const request: GetFeaturesRequest = {
        featureGroupId,
        entityId,
        // Forwarded as a Date when supplied; otherwise left undefined.
        timestamp: timestamp ? new Date(timestamp) : undefined
      };
      const features = await this.featureStoreService.getFeatures(request);
      return c.json({ success: true, data: features });
    } catch (error) {
      return this.fail(c, 'Failed to get features', error);
    }
  }

  /** POST — stores a batch of feature values for a group. */
  async storeFeatures(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const features: FeatureValue[] = await c.req.json();
      await this.featureStoreService.storeFeatures(featureGroupId, features);
      this.logger.info('Features stored', {
        featureGroupId,
        featureCount: features.length
      });
      return c.json({ success: true, message: 'Features stored successfully' });
    } catch (error) {
      return this.fail(c, 'Failed to store features', error);
    }
  }

  /**
   * GET — history of one feature for one entity, optionally bounded by
   * startTime/endTime query parameters; `entityId` is required.
   */
  async getFeatureHistory(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const featureName = c.req.param('featureName');
      const entityId = c.req.query('entityId');
      const startTime = c.req.query('startTime');
      const endTime = c.req.query('endTime');
      if (!entityId) {
        return this.badRequest(c, 'entityId query parameter is required');
      }
      const history = await this.featureStoreService.getFeatureHistory(
        featureGroupId,
        featureName,
        entityId,
        startTime ? new Date(startTime) : undefined,
        endTime ? new Date(endTime) : undefined
      );
      return c.json({ success: true, data: history });
    } catch (error) {
      return this.fail(c, 'Failed to get feature history', error);
    }
  }
}

View file

@ -1,166 +0,0 @@
import { Context } from 'hono';
import { Logger } from '@stock-bot/utils';
/**
 * Kubernetes-style probe endpoints for the feature-store service:
 * overall health (with dependency status), readiness and liveness.
 * The dependency and store probes below are TODO stubs that currently
 * always report healthy/ready.
 */
export class HealthController {
  constructor(private logger: Logger) {}

  /** Converts a byte count to megabytes rounded to two decimals. */
  private static toMb(bytes: number): number {
    return Math.round((bytes / 1024 / 1024) * 100) / 100;
  }

  /** GET /health — service status, memory and dependency health. */
  async getHealth(c: Context) {
    try {
      // One snapshot so used/total are read at the same instant.
      const mem = process.memoryUsage();
      const health = {
        status: 'healthy',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        version: '1.0.0',
        uptime: process.uptime(),
        memory: {
          used: HealthController.toMb(mem.heapUsed),
          total: HealthController.toMb(mem.heapTotal)
        },
        dependencies: {
          redis: await this.checkRedisHealth(),
          database: await this.checkDatabaseHealth(),
          eventBus: await this.checkEventBusHealth()
        }
      };
      return c.json(health);
    } catch (error) {
      this.logger.error('Health check failed', { error });
      return c.json({
        status: 'unhealthy',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 500);
    }
  }

  /** GET /health/readiness — 503 unless every sub-check reports ready. */
  async getReadiness(c: Context) {
    try {
      const readiness = {
        status: 'ready',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        checks: {
          onlineStore: await this.checkOnlineStoreReadiness(),
          offlineStore: await this.checkOfflineStoreReadiness(),
          metadataStore: await this.checkMetadataStoreReadiness(),
          computationEngine: await this.checkComputationEngineReadiness()
        }
      };
      const isReady = Object.values(readiness.checks).every(check => check.status === 'ready');
      return c.json(readiness, isReady ? 200 : 503);
    } catch (error) {
      this.logger.error('Readiness check failed', { error });
      return c.json({
        status: 'not_ready',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 503);
    }
  }

  /** GET /health/liveness — proves the process is alive and responsive. */
  async getLiveness(c: Context) {
    try {
      const liveness = {
        status: 'alive',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        pid: process.pid,
        uptime: process.uptime()
      };
      return c.json(liveness);
    } catch (error) {
      this.logger.error('Liveness check failed', { error });
      return c.json({
        status: 'dead',
        timestamp: new Date().toISOString(),
        service: 'feature-store',
        error: error instanceof Error ? error.message : 'Unknown error'
      }, 500);
    }
  }

  private async checkRedisHealth(): Promise<{ status: string; latency?: number }> {
    try {
      const start = Date.now();
      // TODO: Implement actual Redis health check. Until then latency is ~0
      // and 'healthy' is unconditional.
      const latency = Date.now() - start;
      return { status: 'healthy', latency };
    } catch (error) {
      return { status: 'unhealthy' };
    }
  }

  private async checkDatabaseHealth(): Promise<{ status: string; latency?: number }> {
    try {
      const start = Date.now();
      // TODO: Implement actual database health check. Until then latency is ~0
      // and 'healthy' is unconditional.
      const latency = Date.now() - start;
      return { status: 'healthy', latency };
    } catch (error) {
      return { status: 'unhealthy' };
    }
  }

  private async checkEventBusHealth(): Promise<{ status: string }> {
    try {
      // TODO: Implement actual event bus health check
      return { status: 'healthy' };
    } catch (error) {
      return { status: 'unhealthy' };
    }
  }

  /** Shared probe runner mapping success/throw to ready/not_ready. */
  private async readinessOf(probe: () => Promise<void>): Promise<{ status: string; message?: string }> {
    try {
      await probe();
      return { status: 'ready' };
    } catch (error) {
      return {
        status: 'not_ready',
        message: error instanceof Error ? error.message : 'Unknown error'
      };
    }
  }

  private checkOnlineStoreReadiness(): Promise<{ status: string; message?: string }> {
    // TODO: Implement actual online store readiness check
    return this.readinessOf(async () => {});
  }

  private checkOfflineStoreReadiness(): Promise<{ status: string; message?: string }> {
    // TODO: Implement actual offline store readiness check
    return this.readinessOf(async () => {});
  }

  private checkMetadataStoreReadiness(): Promise<{ status: string; message?: string }> {
    // TODO: Implement actual metadata store readiness check
    return this.readinessOf(async () => {});
  }

  private checkComputationEngineReadiness(): Promise<{ status: string; message?: string }> {
    // TODO: Implement actual computation engine readiness check
    return this.readinessOf(async () => {});
  }
}

View file

@ -1,123 +0,0 @@
import { Context } from 'hono';
import { FeatureMonitoringService } from '../services/FeatureMonitoringService';
import { Logger } from '@stock-bot/utils';
import {
FeatureMonitoringConfig,
FeatureValue
} from '../types/FeatureStore';
/**
 * HTTP controller for feature monitoring: lifecycle control, on-demand
 * drift detection, and metrics/config access. All handlers respond with
 * the envelope { success, data? | message?, error? }; failures are logged
 * and rendered as HTTP 500.
 */
export class MonitoringController {
  constructor(
    private monitoringService: FeatureMonitoringService,
    private logger: Logger
  ) {}

  /** Logs `message` with the error and renders the shared 500 envelope. */
  private fail(c: Context, message: string, error: unknown) {
    this.logger.error(message, { error });
    return c.json({
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error'
    }, 500);
  }

  /** POST — starts monitoring the group with the config in the body. */
  async startMonitoring(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const config: FeatureMonitoringConfig = await c.req.json();
      await this.monitoringService.startMonitoring(featureGroupId, config);
      this.logger.info('Monitoring started', { featureGroupId });
      return c.json({ success: true, message: 'Monitoring started successfully' });
    } catch (error) {
      return this.fail(c, 'Failed to start monitoring', error);
    }
  }

  /** POST — stops monitoring the group. */
  async stopMonitoring(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      await this.monitoringService.stopMonitoring(featureGroupId);
      this.logger.info('Monitoring stopped', { featureGroupId });
      return c.json({ success: true, message: 'Monitoring stopped successfully' });
    } catch (error) {
      return this.fail(c, 'Failed to stop monitoring', error);
    }
  }

  /** POST — runs drift detection against the feature values in the body. */
  async detectDrift(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const recentData: FeatureValue[] = await c.req.json();
      const alerts = await this.monitoringService.detectDrift(featureGroupId, recentData);
      this.logger.info('Drift detection completed', {
        featureGroupId,
        alertsCount: alerts.length
      });
      return c.json({ success: true, data: alerts });
    } catch (error) {
      return this.fail(c, 'Failed to detect drift', error);
    }
  }

  /** GET — current monitoring metrics for the group. */
  async getMonitoringMetrics(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const metrics = await this.monitoringService.getMonitoringMetrics(featureGroupId);
      return c.json({ success: true, data: metrics });
    } catch (error) {
      return this.fail(c, 'Failed to get monitoring metrics', error);
    }
  }

  /** PUT — replaces the group's monitoring configuration. */
  async updateMonitoringConfig(c: Context) {
    try {
      const featureGroupId = c.req.param('id');
      const config: FeatureMonitoringConfig = await c.req.json();
      await this.monitoringService.updateMonitoringConfig(featureGroupId, config);
      this.logger.info('Monitoring config updated', { featureGroupId });
      return c.json({ success: true, message: 'Monitoring configuration updated successfully' });
    } catch (error) {
      return this.fail(c, 'Failed to update monitoring config', error);
    }
  }
}

View file

@ -1,41 +0,0 @@
import { Hono } from 'hono';
import { cors } from 'hono/cors';
import { logger } from 'hono/logger';

// Controllers
import { HealthController } from './controllers/HealthController';

/**
 * Entry point for the feature-store HTTP service. Wires middleware, the
 * health endpoints and placeholder API routes, then exports the Bun-style
 * { port, fetch } server object.
 */
const app = new Hono();

// Middleware
app.use('*', cors());
app.use('*', logger());

// Minimal console-backed logger handed to controllers until a shared
// structured logger is wired in.
const appLogger = { info: console.log, error: console.error, warn: console.warn, debug: console.log };

// Controllers
const healthController = new HealthController(appLogger);

// Health endpoints (bound so `this` survives Hono's handler invocation)
app.get('/health', healthController.getHealth.bind(healthController));
app.get('/health/readiness', healthController.getReadiness.bind(healthController));
app.get('/health/liveness', healthController.getLiveness.bind(healthController));

// API endpoints will be implemented as services are completed
app.get('/api/v1/feature-groups', async (c) => {
  return c.json({ message: 'Feature groups endpoint - not implemented yet' });
});
app.post('/api/v1/feature-groups', async (c) => {
  return c.json({ message: 'Create feature group endpoint - not implemented yet' });
});

// PORT arrives from the environment as a string; coerce to a number and
// fall back to 3003 when it is unset, empty or not a positive number.
const envPort = Number(process.env.PORT);
const port = Number.isFinite(envPort) && envPort > 0 ? envPort : 3003;
console.log(`Feature Store service running on port ${port}`);

export default {
  port,
  fetch: app.fetch,
};

View file

@ -1,167 +0,0 @@
import { logger } from '@stock-bot/utils';
import {
FeatureComputation,
ComputationStatus,
ComputationError
} from '../types/FeatureStore';
/**
 * In-memory engine that runs (currently simulated) feature-computation
 * jobs for feature groups and tracks their lifecycle:
 * pending → running → completed | failed | cancelled.
 * All state lives in process memory; nothing is persisted.
 */
export class FeatureComputationService {
  // Every computation ever started, keyed by computation id.
  private computations: Map<string, FeatureComputation> = new Map();
  // Ids of computations currently in the RUNNING state.
  private runningComputations: Set<string> = new Set();

  /** Resets all in-memory state; call before first use. */
  async initialize(): Promise<void> {
    logger.info('🔄 Initializing Feature Computation Service...');
    this.computations.clear();
    this.runningComputations.clear();
    logger.info('✅ Feature Computation Service initialized');
  }

  /**
   * Registers a new computation for `featureGroupId` and kicks it off in
   * the background. Returns immediately with the PENDING record; poll
   * getComputation() for progress.
   */
  async startComputation(
    featureGroupId: string,
    parameters: Record<string, any>
  ): Promise<FeatureComputation> {
    const computation: FeatureComputation = {
      id: this.generateComputationId(),
      featureGroupId,
      status: ComputationStatus.PENDING,
      startTime: new Date(),
      recordsProcessed: 0,
      recordsGenerated: 0,
      errors: [],
      metadata: parameters,
    };
    this.computations.set(computation.id, computation);
    // Fire-and-forget: executeComputation handles its own errors and never
    // rejects; `void` marks the intentionally floating promise.
    void this.executeComputation(computation);
    logger.info(`⚙️ Started feature computation: ${computation.id} for group: ${featureGroupId}`);
    return computation;
  }

  /** Returns the computation with `id`, or null when unknown. */
  async getComputation(id: string): Promise<FeatureComputation | null> {
    return this.computations.get(id) || null;
  }

  /** Lists computations, optionally filtered to one feature group. */
  async listComputations(featureGroupId?: string): Promise<FeatureComputation[]> {
    const computations = Array.from(this.computations.values());
    return featureGroupId ?
      computations.filter(c => c.featureGroupId === featureGroupId) :
      computations;
  }

  /**
   * Requests cancellation of a RUNNING computation. Returns true when the
   * computation was running and is now marked cancelled; false when it is
   * unknown or not currently running.
   */
  async cancelComputation(id: string): Promise<boolean> {
    const computation = this.computations.get(id);
    if (!computation) {
      return false;
    }
    if (computation.status === ComputationStatus.RUNNING) {
      computation.status = ComputationStatus.CANCELLED;
      computation.endTime = new Date();
      this.runningComputations.delete(id);
      logger.info(`❌ Cancelled computation: ${id}`);
      return true;
    }
    return false;
  }

  /**
   * Drives one computation to completion in batches, honoring cancellation
   * between batches. Never throws: failures are recorded on the computation
   * record and its status is set to FAILED.
   */
  private async executeComputation(computation: FeatureComputation): Promise<void> {
    try {
      computation.status = ComputationStatus.RUNNING;
      this.runningComputations.add(computation.id);
      logger.info(`⚙️ Executing computation: ${computation.id}`);
      // Simulated workload until a real computation engine is wired in.
      const totalRecords = 1000; // Mock value
      const batchSize = 100;
      for (let processed = 0; processed < totalRecords; processed += batchSize) {
        // Stop between batches if cancelComputation() flipped the status.
        if (computation.status === ComputationStatus.CANCELLED) {
          return;
        }
        // Simulate processing time
        await new Promise(resolve => setTimeout(resolve, 100));
        const currentBatch = Math.min(batchSize, totalRecords - processed);
        computation.recordsProcessed += currentBatch;
        computation.recordsGenerated += currentBatch; // Assume 1:1 for simplicity
        // Simulate some errors
        if (Math.random() < 0.05) { // 5% error rate
          const error: ComputationError = {
            entityId: `entity_${processed}`,
            error: 'Simulated processing error',
            timestamp: new Date(),
          };
          computation.errors.push(error);
        }
      }
      computation.status = ComputationStatus.COMPLETED;
      computation.endTime = new Date();
      this.runningComputations.delete(computation.id);
      logger.info(`✅ Completed computation: ${computation.id}`);
    } catch (error) {
      computation.status = ComputationStatus.FAILED;
      computation.endTime = new Date();
      this.runningComputations.delete(computation.id);
      const computationError: ComputationError = {
        entityId: 'unknown',
        error: error instanceof Error ? error.message : 'Unknown error',
        timestamp: new Date(),
      };
      computation.errors.push(computationError);
      logger.error(`❌ Computation failed: ${computation.id}`, error);
    }
  }

  /** Builds an id: millisecond timestamp plus 9 random base-36 characters. */
  private generateComputationId(): string {
    // slice() replaces the deprecated String.prototype.substr; the
    // (2, 11) window yields the same 9 characters substr(2, 9) did.
    return `comp_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  /** Aggregated counters across every computation seen by this instance. */
  async getComputationStats(): Promise<any> {
    const computations = Array.from(this.computations.values());
    return {
      total: computations.length,
      running: this.runningComputations.size,
      byStatus: {
        pending: computations.filter(c => c.status === ComputationStatus.PENDING).length,
        running: computations.filter(c => c.status === ComputationStatus.RUNNING).length,
        completed: computations.filter(c => c.status === ComputationStatus.COMPLETED).length,
        failed: computations.filter(c => c.status === ComputationStatus.FAILED).length,
        cancelled: computations.filter(c => c.status === ComputationStatus.CANCELLED).length,
      },
      totalRecordsProcessed: computations.reduce((sum, c) => sum + c.recordsProcessed, 0),
      totalRecordsGenerated: computations.reduce((sum, c) => sum + c.recordsGenerated, 0),
      totalErrors: computations.reduce((sum, c) => sum + c.errors.length, 0),
    };
  }

  /** Cancels any running computations and clears all in-memory state. */
  async shutdown(): Promise<void> {
    logger.info('🔄 Shutting down Feature Computation Service...');
    // Iterate over a snapshot: cancelComputation() deletes from the set
    // while we walk it.
    for (const computationId of [...this.runningComputations]) {
      await this.cancelComputation(computationId);
    }
    this.computations.clear();
    this.runningComputations.clear();
    logger.info('✅ Feature Computation Service shutdown complete');
  }
}

View file

@ -1,246 +0,0 @@
import { EventBus } from '@stock-bot/event-bus';
import { Logger } from '@stock-bot/utils';
import {
FeatureGroup,
FeatureDriftAlert,
FeatureMonitoringConfig,
FeatureMonitoringMetrics,
FeatureValue,
DriftDetectionMethod
} from '../types/FeatureStore';
/**
 * Contract for per-feature-group monitoring: lifecycle control, drift
 * detection over recent feature values, and metrics/config access.
 */
export interface FeatureMonitoringService {
  /** Begins periodic checks for the group using `config`. */
  startMonitoring(featureGroupId: string, config: FeatureMonitoringConfig): Promise<void>;
  /** Stops periodic checks for the group. */
  stopMonitoring(featureGroupId: string): Promise<void>;
  /** Evaluates `recentData` for drift and returns any resulting alerts. */
  detectDrift(featureGroupId: string, recentData: FeatureValue[]): Promise<FeatureDriftAlert[]>;
  /** Current monitoring metrics for the group. */
  getMonitoringMetrics(featureGroupId: string): Promise<FeatureMonitoringMetrics>;
  /** Replaces the group's monitoring configuration. */
  updateMonitoringConfig(featureGroupId: string, config: FeatureMonitoringConfig): Promise<void>;
}
export class FeatureMonitoringServiceImpl implements FeatureMonitoringService {
private monitoringJobs: Map<string, NodeJS.Timeout> = new Map();
private baselineStats: Map<string, any> = new Map();
constructor(
private eventBus: EventBus,
private logger: Logger
) {}
/**
 * Begins periodic monitoring for a feature group. Any existing monitor
 * for the same group is stopped first, so at most one interval runs per
 * group. Emits 'feature.monitoring.started' once the job is installed.
 *
 * @param featureGroupId Group to monitor.
 * @param config Monitoring settings; checkInterval is in seconds.
 * @throws Re-throws any failure from stop/emit after logging it.
 */
async startMonitoring(featureGroupId: string, config: FeatureMonitoringConfig): Promise<void> {
  try {
    // Stop existing monitoring if running
    await this.stopMonitoring(featureGroupId);
    // The periodic callback catches its own failures: an unhandled
    // rejection inside setInterval would otherwise escape to the process
    // level instead of being logged and retried on the next tick.
    const interval = setInterval(async () => {
      try {
        await this.runMonitoringCheck(featureGroupId, config);
      } catch (error) {
        this.logger.error('Monitoring check failed', { featureGroupId, error });
      }
    }, config.checkInterval * 1000);
    this.monitoringJobs.set(featureGroupId, interval);
    this.logger.info(`Started monitoring for feature group: ${featureGroupId}`);
    await this.eventBus.emit('feature.monitoring.started', {
      featureGroupId,
      config,
      timestamp: new Date()
    });
  } catch (error) {
    this.logger.error('Failed to start feature monitoring', { featureGroupId, error });
    throw error;
  }
}
async stopMonitoring(featureGroupId: string): Promise<void> {
try {
const job = this.monitoringJobs.get(featureGroupId);
if (job) {
clearInterval(job);
this.monitoringJobs.delete(featureGroupId);
this.logger.info(`Stopped monitoring for feature group: ${featureGroupId}`);
await this.eventBus.emit('feature.monitoring.stopped', {
featureGroupId,
timestamp: new Date()
});
}
} catch (error) {
this.logger.error('Failed to stop feature monitoring', { featureGroupId, error });
throw error;
}
}
async detectDrift(featureGroupId: string, recentData: FeatureValue[]): Promise<FeatureDriftAlert[]> {
try {
const alerts: FeatureDriftAlert[] = [];
const baseline = this.baselineStats.get(featureGroupId);
if (!baseline) {
// No baseline available, collect current data as baseline
await this.updateBaseline(featureGroupId, recentData);
return alerts;
}
// Group data by feature name
const featureData = this.groupByFeature(recentData);
for (const [featureName, values] of featureData) {
const currentStats = this.calculateStatistics(values);
const baselineFeatureStats = baseline[featureName];
if (!baselineFeatureStats) continue;
// Detect drift using various methods
const driftScore = await this.calculateDriftScore(
baselineFeatureStats,
currentStats,
DriftDetectionMethod.KOLMOGOROV_SMIRNOV
);
if (driftScore > 0.1) { // Threshold for drift detection
alerts.push({
id: `drift_${featureGroupId}_${featureName}_${Date.now()}`,
featureGroupId,
featureName,
driftScore,
severity: driftScore > 0.3 ? 'high' : driftScore > 0.2 ? 'medium' : 'low',
detectionMethod: DriftDetectionMethod.KOLMOGOROV_SMIRNOV,
baselineStats: baselineFeatureStats,
currentStats,
detectedAt: new Date(),
message: `Feature drift detected for ${featureName} with score ${driftScore.toFixed(3)}`
});
}
}
if (alerts.length > 0) {
await this.eventBus.emit('feature.drift.detected', {
featureGroupId,
alerts,
timestamp: new Date()
});
}
return alerts;
} catch (error) {
this.logger.error('Failed to detect drift', { featureGroupId, error });
throw error;
}
}
async getMonitoringMetrics(featureGroupId: string): Promise<FeatureMonitoringMetrics> {
try {
const isActive = this.monitoringJobs.has(featureGroupId);
const baseline = this.baselineStats.get(featureGroupId);
return {
featureGroupId,
isActive,
lastCheckTime: new Date(),
totalChecks: 0, // Would be stored in persistent storage
driftAlertsCount: 0, // Would be queried from alert storage
averageDriftScore: 0,
featuresMonitored: baseline ? Object.keys(baseline).length : 0,
uptime: isActive ? Date.now() : 0 // Would calculate actual uptime
};
} catch (error) {
this.logger.error('Failed to get monitoring metrics', { featureGroupId, error });
throw error;
}
}
async updateMonitoringConfig(featureGroupId: string, config: FeatureMonitoringConfig): Promise<void> {
try {
// Restart monitoring with new config
if (this.monitoringJobs.has(featureGroupId)) {
await this.stopMonitoring(featureGroupId);
await this.startMonitoring(featureGroupId, config);
}
this.logger.info(`Updated monitoring config for feature group: ${featureGroupId}`);
} catch (error) {
this.logger.error('Failed to update monitoring config', { featureGroupId, error });
throw error;
}
}
private async runMonitoringCheck(featureGroupId: string, config: FeatureMonitoringConfig): Promise<void> {
try {
// In a real implementation, this would fetch recent data from the feature store
const recentData: FeatureValue[] = []; // Placeholder
await this.detectDrift(featureGroupId, recentData);
} catch (error) {
this.logger.error('Monitoring check failed', { featureGroupId, error });
}
}
private async updateBaseline(featureGroupId: string, data: FeatureValue[]): Promise<void> {
const featureData = this.groupByFeature(data);
const baseline: Record<string, any> = {};
for (const [featureName, values] of featureData) {
baseline[featureName] = this.calculateStatistics(values);
}
this.baselineStats.set(featureGroupId, baseline);
}
private groupByFeature(data: FeatureValue[]): Map<string, number[]> {
const grouped = new Map<string, number[]>();
for (const item of data) {
if (!grouped.has(item.featureName)) {
grouped.set(item.featureName, []);
}
grouped.get(item.featureName)!.push(item.value as number);
}
return grouped;
}
private calculateStatistics(values: number[]): any {
const sorted = values.sort((a, b) => a - b);
const n = values.length;
const mean = values.reduce((sum, val) => sum + val, 0) / n;
const variance = values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / n;
const stdDev = Math.sqrt(variance);
return {
count: n,
mean,
stdDev,
min: sorted[0],
max: sorted[n - 1],
median: n % 2 === 0 ? (sorted[n/2 - 1] + sorted[n/2]) / 2 : sorted[Math.floor(n/2)],
q25: sorted[Math.floor(n * 0.25)],
q75: sorted[Math.floor(n * 0.75)]
};
}
private async calculateDriftScore(
baseline: any,
current: any,
method: DriftDetectionMethod
): Promise<number> {
switch (method) {
case DriftDetectionMethod.KOLMOGOROV_SMIRNOV:
// Simplified KS test approximation
return Math.abs(baseline.mean - current.mean) / (baseline.stdDev + current.stdDev + 1e-8);
case DriftDetectionMethod.POPULATION_STABILITY_INDEX:
// Simplified PSI calculation
const expectedRatio = baseline.mean / (baseline.mean + current.mean + 1e-8);
const actualRatio = current.mean / (baseline.mean + current.mean + 1e-8);
return Math.abs(expectedRatio - actualRatio);
case DriftDetectionMethod.JENSEN_SHANNON_DIVERGENCE:
// Simplified JS divergence approximation
return Math.min(1.0, Math.abs(baseline.mean - current.mean) / Math.max(baseline.stdDev, current.stdDev, 1e-8));
default:
return 0;
}
}
}

View file

@ -1,195 +0,0 @@
import { logger } from '@stock-bot/utils';
import { FeatureStatistics, HistogramBucket, ValueCount } from '../types/FeatureStore';
export class FeatureStatisticsService {
  /** Computed statistics keyed by `${featureGroupId}.${featureName}`. */
  private statistics: Map<string, FeatureStatistics> = new Map();

  async initialize(): Promise<void> {
    logger.info('🔄 Initializing Feature Statistics Service...');
    this.statistics.clear();
    logger.info('✅ Feature Statistics Service initialized');
  }

  /**
   * Compute and cache descriptive statistics for one feature across `data`.
   * Null/undefined entries count toward nullCount but are excluded from the
   * numeric stats.
   */
  async computeStatistics(
    featureGroupId: string,
    featureName: string,
    data: any[]
  ): Promise<FeatureStatistics> {
    const values = data.map(item => item[featureName]).filter(v => v !== null && v !== undefined);
    const statistics: FeatureStatistics = {
      featureGroupId,
      featureName,
      statistics: {
        count: data.length,
        nullCount: data.length - values.length,
        distinctCount: new Set(values).size,
      },
      computedAt: new Date(),
    };
    // Compute numerical statistics if applicable
    const numericalValues = values.filter(v => typeof v === 'number');
    if (numericalValues.length > 0) {
      // Copy before sorting — Array#sort mutates in place.
      const sorted = [...numericalValues].sort((a, b) => a - b);
      const sum = numericalValues.reduce((acc, val) => acc + val, 0);
      const mean = sum / numericalValues.length;
      statistics.statistics.min = sorted[0];
      statistics.statistics.max = sorted[sorted.length - 1];
      statistics.statistics.mean = mean;
      statistics.statistics.median = this.calculateMedian(sorted);
      statistics.statistics.stdDev = this.calculateStandardDeviation(numericalValues, mean);
      statistics.statistics.percentiles = this.calculatePercentiles(sorted);
      statistics.statistics.histogram = this.calculateHistogram(numericalValues);
    }
    // Compute top values for categorical data
    statistics.statistics.topValues = this.calculateTopValues(values);
    const key = `${featureGroupId}.${featureName}`;
    this.statistics.set(key, statistics);
    logger.info(`📊 Computed statistics for feature: ${featureGroupId}.${featureName}`);
    return statistics;
  }

  async getStatistics(featureGroupId: string, featureName: string): Promise<FeatureStatistics | null> {
    const key = `${featureGroupId}.${featureName}`;
    return this.statistics.get(key) || null;
  }

  /** All cached statistics belonging to one feature group. */
  async getFeatureGroupStatistics(featureGroupId: string): Promise<FeatureStatistics[]> {
    const groupStats: FeatureStatistics[] = [];
    for (const stats of this.statistics.values()) {
      if (stats.featureGroupId === featureGroupId) {
        groupStats.push(stats);
      }
    }
    return groupStats;
  }

  async getAllStatistics(): Promise<FeatureStatistics[]> {
    return Array.from(this.statistics.values());
  }

  /** Delete stats for one feature, or for the whole group when featureName is omitted. */
  async deleteStatistics(featureGroupId: string, featureName?: string): Promise<void> {
    if (featureName) {
      const key = `${featureGroupId}.${featureName}`;
      this.statistics.delete(key);
    } else {
      // Delete all statistics for the feature group (collect keys first so we
      // don't delete while iterating).
      const keysToDelete: string[] = [];
      for (const [key, stats] of this.statistics.entries()) {
        if (stats.featureGroupId === featureGroupId) {
          keysToDelete.push(key);
        }
      }
      for (const key of keysToDelete) {
        this.statistics.delete(key);
      }
    }
  }

  /** Median of an already-sorted sample. */
  private calculateMedian(sortedValues: number[]): number {
    const length = sortedValues.length;
    if (length % 2 === 0) {
      return (sortedValues[length / 2 - 1] + sortedValues[length / 2]) / 2;
    } else {
      return sortedValues[Math.floor(length / 2)];
    }
  }

  /** Population standard deviation (divides by N, not N-1). */
  private calculateStandardDeviation(values: number[], mean: number): number {
    const squaredDifferences = values.map(value => Math.pow(value - mean, 2));
    const avgSquaredDiff = squaredDifferences.reduce((acc, val) => acc + val, 0) / values.length;
    return Math.sqrt(avgSquaredDiff);
  }

  /** Linear-interpolated percentiles of an already-sorted sample. */
  private calculatePercentiles(sortedValues: number[]): Record<string, number> {
    const percentiles = [5, 10, 25, 50, 75, 90, 95];
    const result: Record<string, number> = {};
    for (const p of percentiles) {
      const index = (p / 100) * (sortedValues.length - 1);
      if (Number.isInteger(index)) {
        result[`p${p}`] = sortedValues[index];
      } else {
        const lower = Math.floor(index);
        const upper = Math.ceil(index);
        const weight = index - lower;
        result[`p${p}`] = sortedValues[lower] * (1 - weight) + sortedValues[upper] * weight;
      }
    }
    return result;
  }

  /**
   * Equal-width histogram. FIXES: empty input previously produced
   * Math.min(...[]) === Infinity bounds; and the strict `<` comparison meant
   * the maximum value was never counted in any bucket — the last bucket is
   * now closed on the right.
   */
  private calculateHistogram(values: number[], buckets: number = 10): HistogramBucket[] {
    if (values.length === 0) {
      return [];
    }
    const min = Math.min(...values);
    const max = Math.max(...values);
    const bucketSize = (max - min) / buckets;
    const histogram: HistogramBucket[] = [];
    for (let i = 0; i < buckets; i++) {
      const bucketMin = min + i * bucketSize;
      const bucketMax = i === buckets - 1 ? max : min + (i + 1) * bucketSize;
      const isLastBucket = i === buckets - 1;
      const count = values.filter(
        v => v >= bucketMin && (isLastBucket ? v <= bucketMax : v < bucketMax)
      ).length;
      histogram.push({
        min: bucketMin,
        max: bucketMax,
        count,
      });
    }
    return histogram;
  }

  /** Most frequent values with their share of the sample. */
  private calculateTopValues(values: any[], limit: number = 10): ValueCount[] {
    const valueCounts = new Map<any, number>();
    for (const value of values) {
      valueCounts.set(value, (valueCounts.get(value) || 0) + 1);
    }
    const sortedCounts = Array.from(valueCounts.entries())
      .map(([value, count]) => ({
        value,
        count,
        percentage: (count / values.length) * 100,
      }))
      .sort((a, b) => b.count - a.count)
      .slice(0, limit);
    return sortedCounts;
  }

  /** Aggregate view across every cached statistic. */
  async getStatisticsSummary(): Promise<any> {
    const allStats = Array.from(this.statistics.values());
    return {
      totalFeatures: allStats.length,
      totalRecords: allStats.reduce((sum, s) => sum + s.statistics.count, 0),
      totalNullValues: allStats.reduce((sum, s) => sum + s.statistics.nullCount, 0),
      featureGroups: new Set(allStats.map(s => s.featureGroupId)).size,
      lastComputed: allStats.length > 0 ?
        Math.max(...allStats.map(s => s.computedAt.getTime())) : null,
    };
  }

  async shutdown(): Promise<void> {
    logger.info('🔄 Shutting down Feature Statistics Service...');
    this.statistics.clear();
    logger.info('✅ Feature Statistics Service shutdown complete');
  }
}

View file

@ -1,313 +0,0 @@
import { EventBus } from '@stock-bot/event-bus';
import { logger } from '@stock-bot/utils';
import {
FeatureGroup,
FeatureGroupStatus,
FeatureVector,
FeatureRequest,
FeatureResponse,
FeatureStorageConfig,
FeatureRegistry
} from '../types/FeatureStore';
import { OnlineStore } from './storage/OnlineStore';
import { OfflineStore } from './storage/OfflineStore';
import { MetadataStore } from './storage/MetadataStore';
export class FeatureStoreService {
  private eventBus: EventBus;
  private onlineStore: OnlineStore;    // low-latency, latest-value reads
  private offlineStore: OfflineStore;  // historical / batch reads
  private metadataStore: MetadataStore;
  private registry: FeatureRegistry;   // in-memory index over groups and features

  constructor() {
    this.eventBus = new EventBus();
    this.onlineStore = new OnlineStore();
    this.offlineStore = new OfflineStore();
    this.metadataStore = new MetadataStore();
    this.registry = {
      featureGroups: new Map(),
      features: new Map(),
      dependencies: new Map(),
      lineage: new Map()
    };
  }

  /** Initialize all backing stores, hydrate the registry, and subscribe to feature events. */
  async initialize(): Promise<void> {
    logger.info('🔄 Initializing Feature Store Service...');
    await this.eventBus.initialize();
    await this.onlineStore.initialize();
    await this.offlineStore.initialize();
    await this.metadataStore.initialize();
    // Load existing feature groups from metadata store
    await this.loadFeatureGroups();
    // Subscribe to feature events
    await this.eventBus.subscribe('feature.*', this.handleFeatureEvent.bind(this));
    logger.info('✅ Feature Store Service initialized');
  }

  /**
   * Register a new feature group. The group always starts in DRAFT status
   * regardless of any status supplied by the caller.
   */
  async createFeatureGroup(featureGroup: Omit<FeatureGroup, 'id' | 'createdAt' | 'updatedAt'>): Promise<FeatureGroup> {
    const featureGroupWithId: FeatureGroup = {
      ...featureGroup,
      id: this.generateFeatureGroupId(),
      status: FeatureGroupStatus.DRAFT,
      createdAt: new Date(),
      updatedAt: new Date(),
    };
    // Store in metadata store
    await this.metadataStore.saveFeatureGroup(featureGroupWithId);
    // Update registry
    this.registry.featureGroups.set(featureGroupWithId.id, featureGroupWithId);
    // Register individual features
    for (const feature of featureGroupWithId.features) {
      const featureKey = `${featureGroupWithId.id}.${feature.name}`;
      this.registry.features.set(featureKey, feature);
    }
    await this.eventBus.publish('feature.group.created', {
      featureGroupId: featureGroupWithId.id,
      featureGroup: featureGroupWithId,
    });
    logger.info(`📋 Created feature group: ${featureGroupWithId.name} (${featureGroupWithId.id})`);
    return featureGroupWithId;
  }

  /** Apply a partial update; the id is preserved and updatedAt is refreshed. */
  async updateFeatureGroup(id: string, updates: Partial<FeatureGroup>): Promise<FeatureGroup> {
    const existingGroup = this.registry.featureGroups.get(id);
    if (!existingGroup) {
      throw new Error(`Feature group not found: ${id}`);
    }
    const updatedGroup: FeatureGroup = {
      ...existingGroup,
      ...updates,
      id, // Ensure ID doesn't change
      updatedAt: new Date(),
    };
    // Store in metadata store
    await this.metadataStore.saveFeatureGroup(updatedGroup);
    // Update registry
    this.registry.featureGroups.set(id, updatedGroup);
    await this.eventBus.publish('feature.group.updated', {
      featureGroupId: id,
      featureGroup: updatedGroup,
    });
    logger.info(`📝 Updated feature group: ${updatedGroup.name} (${id})`);
    return updatedGroup;
  }

  /** Remove a feature group and its data from every backing store. */
  async deleteFeatureGroup(id: string): Promise<void> {
    const featureGroup = this.registry.featureGroups.get(id);
    if (!featureGroup) {
      throw new Error(`Feature group not found: ${id}`);
    }
    // Remove from stores
    await this.metadataStore.deleteFeatureGroup(id);
    await this.onlineStore.deleteFeatureGroup(id);
    await this.offlineStore.deleteFeatureGroup(id);
    // Update registry
    this.registry.featureGroups.delete(id);
    // Remove features from registry
    for (const feature of featureGroup.features) {
      const featureKey = `${id}.${feature.name}`;
      this.registry.features.delete(featureKey);
    }
    await this.eventBus.publish('feature.group.deleted', {
      featureGroupId: id,
      featureGroup,
    });
    logger.info(`🗑️ Deleted feature group: ${featureGroup.name} (${id})`);
  }

  async getFeatureGroup(id: string): Promise<FeatureGroup | null> {
    return this.registry.featureGroups.get(id) || null;
  }

  /** List groups, optionally filtered by status. */
  async listFeatureGroups(status?: FeatureGroupStatus): Promise<FeatureGroup[]> {
    const groups = Array.from(this.registry.featureGroups.values());
    return status ? groups.filter(group => group.status === status) : groups;
  }

  /**
   * Low-latency lookup of the latest features per entity from the online
   * store. Unknown feature-group ids are logged and skipped, not fatal.
   */
  async getOnlineFeatures(request: FeatureRequest): Promise<FeatureResponse[]> {
    logger.info(`🔍 Getting online features for ${request.entityIds.length} entities`);
    const responses: FeatureResponse[] = [];
    for (const entityId of request.entityIds) {
      const features: Record<string, any> = {};
      const metadata: Record<string, any> = {};
      for (const featureGroupId of request.featureGroups) {
        const featureGroup = this.registry.featureGroups.get(featureGroupId);
        if (!featureGroup) {
          logger.warn(`Feature group not found: ${featureGroupId}`);
          continue;
        }
        const featureVector = await this.onlineStore.getFeatures(
          entityId,
          request.entityType,
          featureGroupId,
          request.asOfTime
        );
        if (featureVector) {
          // Later groups overwrite identically-named features from earlier ones.
          Object.assign(features, featureVector.values);
          if (request.includeMetadata) {
            metadata[featureGroupId] = featureVector.metadata;
          }
        }
      }
      responses.push({
        entityId,
        entityType: request.entityType,
        features,
        metadata,
        timestamp: request.asOfTime || new Date(),
      });
    }
    return responses;
  }

  /** Point-in-time reads served from the offline store. */
  async getHistoricalFeatures(request: FeatureRequest): Promise<FeatureResponse[]> {
    logger.info(`📊 Getting historical features for ${request.entityIds.length} entities`);
    return await this.offlineStore.getHistoricalFeatures(request);
  }

  async getBatchFeatures(request: FeatureRequest): Promise<FeatureResponse[]> {
    logger.info(`📦 Getting batch features for ${request.entityIds.length} entities`);
    // For batch requests, use offline store for efficiency
    return await this.offlineStore.getBatchFeatures(request);
  }

  /** Write vectors to both the online (latest) and offline (history) stores. */
  async ingestFeatures(featureVectors: FeatureVector[]): Promise<void> {
    logger.info(`📥 Ingesting ${featureVectors.length} feature vectors`);
    // Store in both online and offline stores in parallel
    await Promise.all([
      this.onlineStore.writeFeatures(featureVectors),
      this.offlineStore.writeFeatures(featureVectors)
    ]);
    await this.eventBus.publish('feature.ingested', {
      vectorCount: featureVectors.length,
      timestamp: new Date(),
    });
  }

  /**
   * Case-insensitive substring search over group name, feature name,
   * description and tags, with optional exact-equality filters on the
   * returned featureInfo fields.
   */
  async searchFeatures(query: string, filters?: Record<string, any>): Promise<any[]> {
    const results: any[] = [];
    for (const [groupId, group] of this.registry.featureGroups) {
      for (const feature of group.features) {
        const featureInfo = {
          featureGroupId: groupId,
          featureGroupName: group.name,
          featureName: feature.name,
          description: feature.description,
          type: feature.type,
          valueType: feature.valueType,
          tags: feature.tags,
        };
        // Simple text search
        const searchText = `${group.name} ${feature.name} ${feature.description || ''} ${feature.tags.join(' ')}`.toLowerCase();
        if (searchText.includes(query.toLowerCase())) {
          // Apply filters if provided: every filter field must match exactly.
          const matches = !filters || Object.entries(filters).every(
            ([key, value]) => featureInfo[key as keyof typeof featureInfo] === value
          );
          if (matches) {
            results.push(featureInfo);
          }
        }
      }
    }
    return results;
  }

  async getFeatureLineage(featureGroupId: string, featureName: string): Promise<any> {
    const lineageKey = `${featureGroupId}.${featureName}`;
    return this.registry.lineage.get(lineageKey) || null;
  }

  /** Usage-tracking stub — a real implementation would query consumers/models. */
  async getFeatureUsage(featureGroupId: string, featureName: string): Promise<any> {
    // In a real implementation, this would track feature usage across models and applications
    return {
      featureGroupId,
      featureName,
      usageCount: 0,
      lastUsed: null,
      consumers: [],
      models: []
    };
  }

  /** Hydrate the in-memory registry from persisted metadata. */
  private async loadFeatureGroups(): Promise<void> {
    logger.info('📂 Loading existing feature groups...');
    const featureGroups = await this.metadataStore.getAllFeatureGroups();
    for (const group of featureGroups) {
      this.registry.featureGroups.set(group.id, group);
      // Register individual features
      for (const feature of group.features) {
        const featureKey = `${group.id}.${feature.name}`;
        this.registry.features.set(featureKey, feature);
      }
    }
    logger.info(`📂 Loaded ${featureGroups.length} feature groups`);
  }

  private async handleFeatureEvent(event: any): Promise<void> {
    logger.debug('📨 Received feature event:', event);
    // Handle feature-level events
  }

  /** Unique-enough id: epoch millis plus a 9-char random base-36 suffix. */
  private generateFeatureGroupId(): string {
    // slice(2, 11) takes the same 9-char window as the deprecated substr(2, 9).
    return `fg_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  /** Shut down the backing stores and disconnect from the event bus. */
  async shutdown(): Promise<void> {
    logger.info('🔄 Shutting down Feature Store Service...');
    await this.onlineStore.shutdown();
    await this.offlineStore.shutdown();
    await this.metadataStore.shutdown();
    await this.eventBus.disconnect();
    logger.info('✅ Feature Store Service shutdown complete');
  }
}

View file

@ -1,52 +0,0 @@
import { logger } from '@stock-bot/utils';
import { FeatureGroup } from '../../types/FeatureStore';
export class MetadataStore {
  // In-memory stand-in for a persistent metadata backend.
  private featureGroups: Map<string, FeatureGroup> = new Map();

  /** Prepare the store for use; any existing in-memory state is discarded. */
  async initialize(): Promise<void> {
    logger.info('🔄 Initializing Metadata Store...');
    // In a real implementation, connect to PostgreSQL or other metadata store
    this.featureGroups.clear();
    logger.info('✅ Metadata Store initialized');
  }

  /** Upsert a feature group; a shallow copy is stored to decouple the caller's object. */
  async saveFeatureGroup(featureGroup: FeatureGroup): Promise<void> {
    const snapshot = { ...featureGroup };
    this.featureGroups.set(snapshot.id, snapshot);
    logger.debug(`💾 Saved feature group metadata: ${featureGroup.id}`);
  }

  /** Look up a single feature group; null when absent. */
  async getFeatureGroup(id: string): Promise<FeatureGroup | null> {
    const group = this.featureGroups.get(id);
    return group ?? null;
  }

  /** All stored feature groups. */
  async getAllFeatureGroups(): Promise<FeatureGroup[]> {
    return [...this.featureGroups.values()];
  }

  /** Remove a feature group; silently succeeds when it does not exist. */
  async deleteFeatureGroup(id: string): Promise<void> {
    this.featureGroups.delete(id);
    logger.debug(`🗑️ Deleted feature group metadata: ${id}`);
  }

  /** Exact-match filter: every supplied criteria field must equal the group's field. */
  async findFeatureGroups(criteria: Partial<FeatureGroup>): Promise<FeatureGroup[]> {
    const conditions = Object.entries(criteria);
    return [...this.featureGroups.values()].filter(group =>
      conditions.every(([key, value]) => group[key as keyof FeatureGroup] === value)
    );
  }

  /** Release in-memory state. */
  async shutdown(): Promise<void> {
    logger.info('🔄 Shutting down Metadata Store...');
    this.featureGroups.clear();
    logger.info('✅ Metadata Store shutdown complete');
  }
}

View file

@ -1,121 +0,0 @@
import { logger } from '@stock-bot/utils';
import { FeatureVector, FeatureRequest, FeatureResponse } from '../../types/FeatureStore';
export class OfflineStore {
  /** Vectors partitioned by `${entityType}:${featureGroupId}`. */
  private store: Map<string, FeatureVector[]> = new Map();

  async initialize(): Promise<void> {
    logger.info('🔄 Initializing Offline Store...');
    // In a real implementation, connect to data warehouse, S3, etc.
    this.store.clear();
    logger.info('✅ Offline Store initialized');
  }

  /** Append vectors to their partitions (history is kept, not overwritten). */
  async writeFeatures(featureVectors: FeatureVector[]): Promise<void> {
    for (const vector of featureVectors) {
      const partitionKey = this.buildPartitionKey(vector.entityType, vector.featureGroupId);
      if (!this.store.has(partitionKey)) {
        this.store.set(partitionKey, []);
      }
      this.store.get(partitionKey)!.push(vector);
    }
    logger.debug(`💾 Stored ${featureVectors.length} feature vectors in offline store`);
  }

  /**
   * Point-in-time read: for each entity and feature group, return the most
   * recent vector at or before `asOfTime` (or the latest overall when unset).
   */
  async getHistoricalFeatures(request: FeatureRequest): Promise<FeatureResponse[]> {
    const responses: FeatureResponse[] = [];
    for (const entityId of request.entityIds) {
      const features: Record<string, any> = {};
      const metadata: Record<string, any> = {};
      for (const featureGroupId of request.featureGroups) {
        const partitionKey = this.buildPartitionKey(request.entityType, featureGroupId);
        const vectors = this.store.get(partitionKey) || [];
        // Find the most recent vector for this entity before asOfTime
        // (filter() yields a fresh array, so sorting it does not mutate the store).
        const relevantVectors = vectors
          .filter(v => v.entityId === entityId)
          .filter(v => !request.asOfTime || v.timestamp <= request.asOfTime)
          .sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());
        if (relevantVectors.length > 0) {
          const latestVector = relevantVectors[0];
          Object.assign(features, latestVector.values);
          if (request.includeMetadata) {
            metadata[featureGroupId] = latestVector.metadata;
          }
        }
      }
      responses.push({
        entityId,
        entityType: request.entityType,
        features,
        metadata,
        timestamp: request.asOfTime || new Date(),
      });
    }
    return responses;
  }

  async getBatchFeatures(request: FeatureRequest): Promise<FeatureResponse[]> {
    // For simplicity, use the same logic as historical features
    // In a real implementation, this would use optimized batch processing
    return await this.getHistoricalFeatures(request);
  }

  /** Raw partition scan with optional inclusive time bounds. */
  async getFeatureData(
    featureGroupId: string,
    entityType: string,
    startTime?: Date,
    endTime?: Date
  ): Promise<FeatureVector[]> {
    const partitionKey = this.buildPartitionKey(entityType, featureGroupId);
    let vectors = this.store.get(partitionKey) || [];
    // Apply time filters
    if (startTime) {
      vectors = vectors.filter(v => v.timestamp >= startTime);
    }
    if (endTime) {
      vectors = vectors.filter(v => v.timestamp <= endTime);
    }
    return vectors;
  }

  /** Drop every partition belonging to the feature group. */
  async deleteFeatureGroup(featureGroupId: string): Promise<void> {
    const keysToDelete: string[] = [];
    for (const key of this.store.keys()) {
      // FIX: exact suffix match. The old `includes(':id')` check also deleted
      // partitions of groups whose id merely extends this one
      // (e.g. ':fg_1' matched 'stock:fg_10').
      if (key.endsWith(`:${featureGroupId}`)) {
        keysToDelete.push(key);
      }
    }
    for (const key of keysToDelete) {
      this.store.delete(key);
    }
    logger.debug(`🗑️ Deleted ${keysToDelete.length} partitions for feature group: ${featureGroupId}`);
  }

  // Partition layout: entityType:featureGroupId
  private buildPartitionKey(entityType: string, featureGroupId: string): string {
    return `${entityType}:${featureGroupId}`;
  }

  async shutdown(): Promise<void> {
    logger.info('🔄 Shutting down Offline Store...');
    this.store.clear();
    logger.info('✅ Offline Store shutdown complete');
  }
}

View file

@ -1,75 +0,0 @@
import { logger } from '@stock-bot/utils';
import { FeatureVector, FeatureRequest, FeatureResponse } from '../../types/FeatureStore';
export class OnlineStore {
  /** Latest vector per (entityType, entityId, featureGroupId) key. */
  private store: Map<string, any> = new Map();

  async initialize(): Promise<void> {
    logger.info('🔄 Initializing Online Store...');
    // In a real implementation, connect to Redis or other online store
    this.store.clear();
    logger.info('✅ Online Store initialized');
  }

  /** Upsert: each write replaces the previous vector for the same key. */
  async writeFeatures(featureVectors: FeatureVector[]): Promise<void> {
    for (const vector of featureVectors) {
      const key = this.buildKey(vector.entityId, vector.entityType, vector.featureGroupId);
      this.store.set(key, {
        ...vector,
        timestamp: vector.timestamp,
      });
    }
    logger.debug(`💾 Stored ${featureVectors.length} feature vectors in online store`);
  }

  /**
   * Latest stored vector for the entity/group, or null when absent or when
   * the stored vector is newer than `asOfTime`.
   */
  async getFeatures(
    entityId: string,
    entityType: string,
    featureGroupId: string,
    asOfTime?: Date
  ): Promise<FeatureVector | null> {
    const key = this.buildKey(entityId, entityType, featureGroupId);
    const storedVector = this.store.get(key);
    if (!storedVector) {
      return null;
    }
    // If asOfTime is specified, check if the stored vector is valid at that time
    if (asOfTime && storedVector.timestamp > asOfTime) {
      return null;
    }
    return storedVector;
  }

  /** Remove every record belonging to the feature group. */
  async deleteFeatureGroup(featureGroupId: string): Promise<void> {
    const keysToDelete: string[] = [];
    for (const key of this.store.keys()) {
      // FIX: exact suffix match. The old `includes(':id')` check also deleted
      // records of groups whose id merely extends this one
      // (e.g. ':fg_1' matched '...:fg_10').
      if (key.endsWith(`:${featureGroupId}`)) {
        keysToDelete.push(key);
      }
    }
    for (const key of keysToDelete) {
      this.store.delete(key);
    }
    logger.debug(`🗑️ Deleted ${keysToDelete.length} records for feature group: ${featureGroupId}`);
  }

  // Key layout: entityType:entityId:featureGroupId
  private buildKey(entityId: string, entityType: string, featureGroupId: string): string {
    return `${entityType}:${entityId}:${featureGroupId}`;
  }

  async shutdown(): Promise<void> {
    logger.info('🔄 Shutting down Online Store...');
    this.store.clear();
    logger.info('✅ Online Store shutdown complete');
  }
}

View file

@ -1,243 +0,0 @@
// Feature Store Types
/** A versioned, named collection of features sharing one source and schedule. */
export interface FeatureGroup {
id: string;
name: string;
description?: string;
version: string;
features: Feature[];
source: FeatureSource;
schedule?: FeatureSchedule;
metadata: Record<string, any>;
createdAt: Date;
updatedAt: Date;
status: FeatureGroupStatus;
}
/** Lifecycle states of a feature group. */
export enum FeatureGroupStatus {
DRAFT = 'draft',
ACTIVE = 'active',
DEPRECATED = 'deprecated',
ARCHIVED = 'archived',
}
/** Definition of a single feature within a group. */
export interface Feature {
name: string;
type: FeatureType;
description?: string;
// Runtime shape of the stored value.
valueType: 'number' | 'string' | 'boolean' | 'array' | 'object';
nullable: boolean;
defaultValue?: any;
validation?: FeatureValidation;
transformation?: FeatureTransformation;
tags: string[];
}
/** Semantic category of a feature. */
export enum FeatureType {
NUMERICAL = 'numerical',
CATEGORICAL = 'categorical',
BOOLEAN = 'boolean',
TEXT = 'text',
TIMESTAMP = 'timestamp',
DERIVED = 'derived',
}
/** Where and how a feature group's raw data is obtained. */
export interface FeatureSource {
type: 'batch' | 'stream' | 'sql' | 'api' | 'file';
connection: Record<string, any>;
query?: string;
transformation?: string;
// NOTE(review): units not established here — presumably seconds; confirm against consumers.
refreshInterval?: number;
}
/** Cron-based recomputation schedule for a group. */
export interface FeatureSchedule {
cronExpression: string;
enabled: boolean;
lastRun: Date | null;
nextRun: Date | null;
}
/** Declarative validation rules applied to a feature's values. */
export interface FeatureValidation {
required: boolean;
minValue?: number;
maxValue?: number;
allowedValues?: any[];
pattern?: string;
customValidator?: string;
}
/** Optional preprocessing step applied to raw values. */
export interface FeatureTransformation {
type: 'normalize' | 'standardize' | 'encode' | 'custom';
parameters: Record<string, any>;
}
// Feature Value Types
/** One entity's feature values for one group at one point in time. */
export interface FeatureVector {
entityId: string;
entityType: string;
featureGroupId: string;
timestamp: Date;
values: Record<string, any>;
metadata?: Record<string, any>;
}
/** Read request for one or more entities across one or more groups. */
export interface FeatureRequest {
entityIds: string[];
entityType: string;
featureGroups: string[];
// Upper time bound for point-in-time reads; when unset, "latest" is returned.
asOfTime?: Date;
pointInTime?: boolean;
includeMetadata?: boolean;
}
/** Per-entity result of a feature read. */
export interface FeatureResponse {
entityId: string;
entityType: string;
features: Record<string, any>;
metadata: Record<string, any>;
timestamp: Date;
}
// Feature Store Operations
/** One execution of a feature-group computation job. */
export interface FeatureComputation {
id: string;
featureGroupId: string;
status: ComputationStatus;
startTime: Date;
endTime?: Date;
recordsProcessed: number;
recordsGenerated: number;
errors: ComputationError[];
metadata: Record<string, any>;
}
/** Job lifecycle states. */
export enum ComputationStatus {
PENDING = 'pending',
RUNNING = 'running',
COMPLETED = 'completed',
FAILED = 'failed',
CANCELLED = 'cancelled',
}
/** A per-entity failure recorded during a computation run. */
export interface ComputationError {
entityId: string;
error: string;
timestamp: Date;
}
// Feature Statistics
/** Cached descriptive statistics for one feature. Numeric fields are optional
 * because they only apply to numerical features. */
export interface FeatureStatistics {
featureGroupId: string;
featureName: string;
statistics: {
count: number;
nullCount: number;
distinctCount: number;
min?: number;
max?: number;
mean?: number;
median?: number;
stdDev?: number;
percentiles?: Record<string, number>;
histogram?: HistogramBucket[];
topValues?: ValueCount[];
};
computedAt: Date;
}
/** One equal-width histogram bin. */
export interface HistogramBucket {
min: number;
max: number;
count: number;
}
/** Frequency of one distinct value within a sample. */
export interface ValueCount {
value: any;
count: number;
percentage: number;
}
// Feature Registry
/** In-memory index over groups, features, their dependencies and lineage. */
export interface FeatureRegistry {
featureGroups: Map<string, FeatureGroup>;
features: Map<string, Feature>;
dependencies: Map<string, string[]>;
lineage: Map<string, FeatureLineage>;
}
/** Upstream/downstream provenance for a single feature. */
export interface FeatureLineage {
featureGroupId: string;
featureName: string;
upstream: FeatureDependency[];
downstream: FeatureDependency[];
transformations: string[];
}
/** Edge in the feature dependency graph. */
export interface FeatureDependency {
featureGroupId: string;
featureName: string;
dependencyType: 'direct' | 'derived' | 'aggregated';
}
// Storage Types
/** Configuration for the three backing stores. */
export interface FeatureStorageConfig {
online: OnlineStoreConfig;
offline: OfflineStoreConfig;
metadata: MetadataStoreConfig;
}
/** Low-latency "latest value" store backend. */
export interface OnlineStoreConfig {
type: 'redis' | 'dynamodb' | 'cassandra';
connection: Record<string, any>;
ttl?: number;
keyFormat?: string;
}
/** Historical/batch store backend. */
export interface OfflineStoreConfig {
type: 'parquet' | 'delta' | 'postgresql' | 's3';
connection: Record<string, any>;
partitioning?: PartitioningConfig;
}
/** Metadata (registry persistence) backend. */
export interface MetadataStoreConfig {
type: 'postgresql' | 'mysql' | 'sqlite';
connection: Record<string, any>;
}
/** How offline data is partitioned on disk. */
export interface PartitioningConfig {
columns: string[];
strategy: 'time' | 'hash' | 'range';
granularity?: 'hour' | 'day' | 'month';
}
// Monitoring and Alerting
/** Monitoring configuration and alert history for one feature. */
export interface FeatureMonitoring {
featureGroupId: string;
featureName: string;
monitors: FeatureMonitor[];
alerts: FeatureAlert[];
}
/** A single monitor definition (drift, freshness, etc.). */
export interface FeatureMonitor {
name: string;
type: 'drift' | 'freshness' | 'availability' | 'quality';
threshold: number;
enabled: boolean;
configuration: Record<string, any>;
}
/** An alert raised by a monitor. */
export interface FeatureAlert {
id: string;
monitorName: string;
level: 'warning' | 'error' | 'critical';
message: string;
timestamp: Date;
resolved: boolean;
resolvedAt?: Date;
}

View file

@ -1,23 +0,0 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"module": "ESNext",
"moduleResolution": "bundler",
"declaration": true,
"declarationMap": true,
"allowImportingTsExtensions": true,
"noEmit": true,
"paths": {
"@/*": ["./src/*"]
}
},
"include": [
"src/**/*"
],
"exclude": [
"node_modules",
"dist"
]
}

View file

@ -1,25 +0,0 @@
{
"name": "backtest-engine",
"version": "1.0.0",
"description": "Dedicated backtesting engine for trading strategies",
"main": "src/index.ts",
"scripts": {
"dev": "bun run --watch src/index.ts",
"start": "bun run src/index.ts",
"test": "bun test --timeout 10000 src/tests/**/*.test.ts",
"test:watch": "bun test --watch src/tests/**/*.test.ts"
},
"dependencies": {
"hono": "^4.6.3",
"@stock-bot/types": "workspace:*",
"@stock-bot/utils": "workspace:*",
"@stock-bot/event-bus": "workspace:*",
"@stock-bot/api-client": "workspace:*",
"@stock-bot/config": "*",
"ws": "^8.18.0",
"axios": "^1.6.2"
},
"devDependencies": {
"bun-types": "^1.2.15",
"@types/ws": "^8.5.12"
}
}

View file

@ -1,650 +0,0 @@
import { EventEmitter } from 'events';
import { OHLCV } from '@stock-bot/types';
import { Order, Position } from '@stock-bot/types';
import { createLogger } from '@stock-bot/utils';
import { financialUtils } from '@stock-bot/utils';
const logger = createLogger('backtest-engine');
// A single historical price bar; alias of the shared OHLCV shape from @stock-bot/types.
export type BarData = OHLCV;
// Strategy interface to match existing pattern
/**
 * Contract a strategy must satisfy to be driven by BacktestEngine.
 *
 * `start` and `onOrderFilled` were missing from the original declaration
 * even though the engine invokes both unconditionally during a run.
 */
export interface StrategyInterface {
  /** Unique identifier; used in engine events and results. */
  id: string;
  /** Called once before the first bar is delivered. */
  start(): Promise<void> | void;
  /** Called for every bar in chronological order; returned orders are queued for matching. */
  onBar(bar: BarData): Promise<Order[]> | Order[];
  /** Called after the engine fills one of the strategy's orders. */
  onOrderFilled(order: Order): Promise<void> | void;
  /** Called once after the last bar, for cleanup. */
  stop(): Promise<void>;
}
/**
 * Configuration for a single backtest run.
 *
 * `symbols`, `dataResolution` and `mode` were previously missing even
 * though the engine reads the first two and BacktestService sets all three.
 */
export interface BacktestConfig {
  /** Starting cash for the simulated portfolio. */
  initialCapital: number;
  startDate: Date;
  endDate: Date;
  /** Commission as a fraction of order value (0.001 = 0.1%). */
  commission: number;
  /** Slippage as a fraction applied to market-order fill prices. */
  slippage: number;
  /** Instruments to load historical data for and trade. */
  symbols: string[];
  /** Bar resolution of the historical data. */
  dataResolution: '1m' | '5m' | '15m' | '30m' | '1h' | '4h' | '1d';
  /** Execution style; only the event-based path is currently wired up. */
  mode?: 'event' | 'vector';
}
/**
 * Aggregate results of a completed backtest.
 *
 * Shape aligned with what BacktestEngine.generateResults() actually emits
 * (strategyId, duration, annualizedReturn, maxDrawdownDuration, the
 * average-trade figures, and dailyReturns entries of `{date, return}`),
 * plus the optional advanced metrics added by PerformanceAnalytics.
 */
export interface BacktestResult {
  /** Id of the strategy that produced this result. */
  strategyId?: string;
  startDate: Date;
  endDate: Date;
  /** Wall-clock duration of the simulation, in milliseconds. */
  duration?: number;
  initialCapital: number;
  finalCapital: number;
  /** Total return over the run, as a fraction (0.1 = +10%). */
  totalReturn: number;
  annualizedReturn?: number;
  totalTrades: number;
  winningTrades: number;
  losingTrades: number;
  /** Fraction of closed trades that were profitable. */
  winRate: number;
  avgWin?: number;
  avgLoss?: number;
  averageWinningTrade?: number;
  averageLosingTrade?: number;
  /** Gross profits divided by gross losses. */
  profitFactor: number;
  sharpeRatio: number;
  // Advanced metrics, populated by PerformanceAnalytics.enhanceResults().
  sortinoRatio?: number;
  calmarRatio?: number;
  omegaRatio?: number;
  /** Compound annual growth rate. */
  cagr?: number;
  /** Annualized volatility of daily returns. */
  volatility?: number;
  ulcerIndex?: number;
  /** Worst peak-to-trough decline, as a fraction. */
  maxDrawdown: number;
  /** Longest drawdown, in days. */
  maxDrawdownDuration?: number;
  /** One record per fully closed position. */
  trades: Array<{
    id?: string;
    symbol: string;
    side?: 'BUY' | 'SELL';
    quantity: number;
    entryTime: Date;
    exitTime: Date;
    entryPrice: number;
    exitPrice: number;
    pnl: number;
    pnlPercent: number;
  }>;
  /** Per-day return series; `return` is the day's fractional return. */
  dailyReturns: Array<{
    date: Date;
    return: number;
    portfolioValue?: number;
    dayReturn?: number;
  }>;
}
/**
 * Snapshot of a running backtest.
 * NOTE(review): a second `BacktestProgress` interface is declared later in
 * this file; TypeScript merges same-named interfaces, so these members are
 * combined with that declaration's — consider consolidating the two.
 */
export interface BacktestProgress {
currentDate: Date;
progress: number; // 0-100
portfolioValue: number;
totalTrades: number;
}
/**
 * Source of historical bars for a backtest.
 * NOTE(review): a second `DataFeed` interface with a different
 * `getHistoricalData` signature is declared later in this file; interface
 * merging turns the two signatures into overloads — consider consolidating.
 */
export interface DataFeed {
getHistoricalData(symbol: string, startDate: Date, endDate: Date): Promise<BarData[]>;
}
// Extended Position interface that includes additional fields needed for backtesting
/**
 * Position state tracked by the backtest engine.
 *
 * The engine creates positions with `{ symbol, quantity, avgPrice, side,
 * entryTime }` and later attaches `unrealizedPnL`, so only those core
 * fields are required; the remainder are optional for compatibility with
 * the shared Position type.
 */
export interface BacktestPosition {
  symbol: string;
  /** Number of shares/contracts currently held. */
  quantity: number;
  /** Volume-weighted average entry price. */
  avgPrice: number;
  /** Simulation time at which the position was opened. */
  entryTime: Date;
  /** Direction of the position; the engine currently only opens 'LONG'. */
  side?: 'LONG' | 'SHORT';
  /** Alias for avgPrice kept for parity with the shared Position shape. */
  averagePrice?: number;
  marketValue?: number;
  /** Mark-to-market P&L, updated as bars arrive. */
  unrealizedPnL?: number;
  timestamp?: Date;
}
// Extended Order interface that includes additional fields needed for backtesting
export interface BacktestOrder extends Order {
// Price at which the simulated fill executed (set when status becomes 'FILLED').
fillPrice?: number;
// Simulation time of the fill.
fillTime?: Date;
}
trades: Array<{
symbol: string;
entryTime: Date;
entryPrice: number;
exitTime: Date;
exitPrice: number;
quantity: number;
pnl: number;
pnlPercent: number;
}>;
}
/**
 * Progress payload emitted by BacktestEngine.updateProgress().
 * NOTE(review): this duplicates the earlier `BacktestProgress` declaration
 * in this file; TypeScript merges the two — consider consolidating.
 */
export interface BacktestProgress {
progress: number; // 0-100
currentDate: Date;
processingSpeed: number; // Bars per second
estimatedTimeRemaining: number; // milliseconds
currentCapital: number;
currentReturn: number;
currentDrawdown: number;
}
/**
 * Historical-data source used by BacktestEngine (this is the signature the
 * engine actually calls, with a resolution argument).
 * NOTE(review): duplicates the earlier `DataFeed` declaration; the merged
 * interface exposes both `getHistoricalData` signatures as overloads.
 */
export interface DataFeed {
getHistoricalData(symbol: string, resolution: string, start: Date, end: Date): Promise<BarData[]>;
// True if the feed can supply bars covering the requested window.
hasDataFor(symbol: string, resolution: string, start: Date, end: Date): Promise<boolean>;
}
/**
 * Event-driven backtesting engine.
 *
 * Replays historical bars through a strategy in chronological order,
 * simulates fills with commission and slippage, tracks positions, P&L,
 * drawdown and daily returns, and reports via EventEmitter events:
 * 'started', 'loading', 'loaded', 'progress', 'orderFilled',
 * 'orderRejected', 'completed', 'error'.
 */
export class BacktestEngine extends EventEmitter {
private config: BacktestConfig;
private strategy: StrategyInterface;
private dataFeed: DataFeed;
// Guards against concurrent run() calls on the same instance.
private isRunning: boolean = false;
// NOTE(review): barBuffer is never read or written in this class.
private barBuffer: Map<string, BarData[]> = new Map();
// Orders submitted by the strategy that have not been matched yet.
private pendingOrders: BacktestOrder[] = [];
private filledOrders: BacktestOrder[] = [];
// Simulation clock: timestamp of the bar currently being processed.
private currentTime: Date;
private startTime: number = 0; // For performance tracking
// NOTE(review): processedBars is never incremented, so the progress
// event's processingSpeed is always reported as 0.
private processedBars: number = 0;
// Historical bars per symbol, loaded up front by loadMarketData().
private marketData: Map<string, BarData[]> = new Map();
// Results tracking
private initialCapital: number;
private currentCapital: number; // cash only; excludes open positions
private positions = new Map<string, BacktestPosition>();
private trades: BacktestResult['trades'] = [];
private dailyReturns: BacktestResult['dailyReturns'] = [];
private previousPortfolioValue: number;
private highWaterMark: number; // highest portfolio value seen so far
private maxDrawdown: number = 0; // worst peak-to-trough decline, as a fraction
private drawdownStartTime: Date | null = null;
private maxDrawdownDuration: number = 0; // longest drawdown, in days
private winningTrades: number = 0;
private losingTrades: number = 0;
private breakEvenTrades: number = 0;
private totalProfits: number = 0; // sum of positive trade P&L
private totalLosses: number = 0; // sum of |negative trade P&L|
/**
 * @param strategy Strategy under test; receives bars and fill notifications.
 * @param config Backtest window, starting capital, costs and data settings.
 * @param dataFeed Source of historical bars.
 */
constructor(strategy: StrategyInterface, config: BacktestConfig, dataFeed: DataFeed) {
super();
this.strategy = strategy;
this.config = config;
this.dataFeed = dataFeed;
this.currentTime = new Date(config.startDate);
this.initialCapital = config.initialCapital;
this.currentCapital = config.initialCapital;
this.previousPortfolioValue = config.initialCapital;
this.highWaterMark = config.initialCapital;
}
/**
 * Run the backtest from startDate to endDate.
 * Emits 'started', then 'completed' with the result or 'error' on failure.
 * @returns Aggregate performance metrics for the run.
 * @throws Error if a backtest is already running on this instance.
 */
async run(): Promise<BacktestResult> {
if (this.isRunning) {
throw new Error('Backtest is already running');
}
this.isRunning = true;
this.startTime = Date.now();
this.emit('started', { strategyId: this.strategy.id, config: this.config });
try {
// NOTE(review): run() always takes the event-based path; runVectorized()
// below is dead code regardless of config.mode.
await this.runEventBased();
const result = this.generateResults();
this.emit('completed', { strategyId: this.strategy.id, result });
this.isRunning = false;
return result;
} catch (error) {
this.isRunning = false;
this.emit('error', { strategyId: this.strategy.id, error });
throw error;
}
}
/**
 * Core simulation loop: merge all symbols' bars into one chronological
 * timeline, feed each bar to the strategy, match pending orders, update
 * positions and daily returns, then flatten any remaining positions.
 */
private async runEventBased(): Promise<void> {
// Load market data for all symbols
await this.loadMarketData();
// Initialize the strategy
// NOTE(review): keep StrategyInterface in sync — this loop requires
// start(), onBar(), onOrderFilled() and stop() on the strategy.
await this.strategy.start();
// Create a merged timeline of all bars across all symbols, sorted by timestamp
const timeline = this.createMergedTimeline();
// Process each event in chronological order
let lastProgressUpdate = Date.now();
let prevDate = new Date(0);
for (let i = 0; i < timeline.length; i++) {
const bar = timeline[i];
this.currentTime = bar.timestamp;
// Process any pending orders
await this.processOrders(bar);
// Update positions with current prices
this.updatePositions(bar);
// If we've crossed to a new day, calculate daily return
if (this.currentTime.toDateString() !== prevDate.toDateString()) {
this.calculateDailyReturn();
prevDate = this.currentTime;
}
// Send the new bar to the strategy
const orders = await this.strategy.onBar(bar);
// Add any new orders to the pending orders queue
if (orders && orders.length > 0) {
this.pendingOrders.push(...orders);
}
// Update progress periodically
if (Date.now() - lastProgressUpdate > 1000) { // Update every second
this.updateProgress(i / timeline.length);
lastProgressUpdate = Date.now();
}
}
// Process any remaining orders
for (const order of this.pendingOrders) {
await this.processOrder(order);
}
// Close any remaining positions at the last known price
await this.closeAllPositions();
// Clean up strategy
await this.strategy.stop();
}
/**
 * Placeholder for a vectorized (pre-computed signal) backtest.
 * NOTE(review): never invoked by run(); the body only sets up locals and
 * emits 'completed' without doing any real work.
 */
private async runVectorized(): Promise<void> {
// Load market data for all symbols
await this.loadMarketData();
// To implement a vectorized approach, we need to:
// 1. Pre-compute technical indicators
// 2. Generate buy/sell signals for the entire dataset
// 3. Calculate portfolio values based on signals
// This is a simplified implementation since specific vectorized strategies
// will need to be implemented separately based on the strategy type
const timeline = this.createMergedTimeline();
const startTime = Date.now();
// Initialize variables for tracking performance
let currentPositions = new Map<string, number>();
let currentCash = this.initialCapital;
let prevPortfolioValue = this.initialCapital;
let highWaterMark = this.initialCapital;
let maxDrawdown = 0;
let maxDrawdownStartDate = new Date();
let maxDrawdownEndDate = new Date();
let currentDrawdownStart = new Date();
// Pre-process data (this would be implemented by the specific strategy)
const allBars = new Map<string, BarData[]>();
for (const symbol of this.config.symbols) {
allBars.set(symbol, this.marketData.get(symbol) || []);
}
// Apply strategy logic (vectorized implementation would be here)
// For now, we'll just simulate the processing
this.emit('completed', { message: 'Vectorized backtest completed in fast mode' });
}
/**
 * Load historical bars for every configured symbol into marketData.
 * Emits 'loading'/'loaded' per symbol.
 * @throws Error when the feed has no data for a symbol at the resolution.
 */
private async loadMarketData(): Promise<void> {
for (const symbol of this.config.symbols) {
this.emit('loading', { symbol, resolution: this.config.dataResolution });
// Check if data is available
const hasData = await this.dataFeed.hasDataFor(
symbol,
this.config.dataResolution,
this.config.startDate,
this.config.endDate
);
if (!hasData) {
throw new Error(`No data available for ${symbol} at resolution ${this.config.dataResolution}`);
}
// Load data
const data = await this.dataFeed.getHistoricalData(
symbol,
this.config.dataResolution,
this.config.startDate,
this.config.endDate
);
this.marketData.set(symbol, data);
this.emit('loaded', { symbol, count: data.length });
}
}
/** Flatten all symbols' bars into one array sorted by timestamp. */
private createMergedTimeline(): BarData[] {
const allBars: BarData[] = [];
for (const [symbol, bars] of this.marketData.entries()) {
allBars.push(...bars);
}
// Sort by timestamp
return allBars.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
}
/** Match every pending order for the current bar's symbol. */
private async processOrders(currentBar: BarData): Promise<void> {
// Find orders for the current symbol
const ordersToProcess = this.pendingOrders.filter(order => order.symbol === currentBar.symbol);
if (ordersToProcess.length === 0) return;
// Remove these orders from pendingOrders
// NOTE(review): orders are removed here even if processOrder() later
// returns without filling (e.g. an unreached limit price), so unfilled
// limit orders are silently dropped instead of staying working.
this.pendingOrders = this.pendingOrders.filter(order => order.symbol !== currentBar.symbol);
// Process each order
for (const order of ordersToProcess) {
await this.processOrder(order);
}
}
/**
 * Simulate execution of a single order: pick a fill price (with slippage
 * for market orders, limit checks for limit orders), apply commission,
 * update cash/positions, record closed trades, and notify the strategy.
 */
private async processOrder(order: Order): Promise<void> {
// Get the latest price for the symbol
const latestBars = this.marketData.get(order.symbol);
if (!latestBars || latestBars.length === 0) {
order.status = 'REJECTED';
this.emit('orderRejected', { order, reason: 'No market data available' });
return;
}
// Find the bar closest to the order time
const bar = latestBars.find(b =>
b.timestamp.getTime() >= order.timestamp.getTime()
) || latestBars[latestBars.length - 1];
// Calculate fill price with slippage
let fillPrice: number;
if (order.type === 'MARKET') {
// Apply slippage model
const slippageFactor = 1 + (order.side === 'BUY' ? this.config.slippage : -this.config.slippage);
fillPrice = bar.close * slippageFactor;
} else if (order.type === 'LIMIT' && order.price !== undefined) {
// For limit orders, check if the price was reached
if ((order.side === 'BUY' && bar.low <= order.price) ||
(order.side === 'SELL' && bar.high >= order.price)) {
fillPrice = order.price;
} else {
// Limit price not reached
return;
}
} else {
// Other order types not implemented
order.status = 'REJECTED';
this.emit('orderRejected', { order, reason: 'Order type not supported' });
return;
}
// Calculate commission
const orderValue = order.quantity * fillPrice;
const commission = orderValue * this.config.commission;
// Check if we have enough cash for BUY orders
if (order.side === 'BUY') {
const totalCost = orderValue + commission;
if (totalCost > this.currentCapital) {
// Not enough cash
order.status = 'REJECTED';
this.emit('orderRejected', { order, reason: 'Insufficient funds' });
return;
}
// Update cash
this.currentCapital -= totalCost;
// Update or create position
const existingPosition = this.positions.get(order.symbol);
if (existingPosition) {
// Update existing position (average down)
// NOTE(review): the inner `totalCost` shadows the outer cash-check
// variable above; intentional but easy to misread.
const totalShares = existingPosition.quantity + order.quantity;
const totalCost = (existingPosition.quantity * existingPosition.avgPrice) + (order.quantity * fillPrice);
existingPosition.avgPrice = totalCost / totalShares;
existingPosition.quantity = totalShares;
} else {
// Create new position
this.positions.set(order.symbol, {
symbol: order.symbol,
quantity: order.quantity,
avgPrice: fillPrice,
side: 'LONG',
entryTime: this.currentTime
});
}
} else if (order.side === 'SELL') {
const position = this.positions.get(order.symbol);
if (!position || position.quantity < order.quantity) {
// Not enough shares to sell
order.status = 'REJECTED';
this.emit('orderRejected', { order, reason: 'Insufficient position' });
return;
}
// Calculate P&L
// NOTE(review): realized pnl ignores commissions; they only reduce cash.
const pnl = (fillPrice - position.avgPrice) * order.quantity;
// Update cash
this.currentCapital += orderValue - commission;
// Update position
position.quantity -= order.quantity;
if (position.quantity === 0) {
// Position closed, record the trade
// NOTE(review): partial sells realize cash but are only recorded as a
// trade (and counted in win/loss stats) once quantity reaches 0.
this.positions.delete(order.symbol);
this.trades.push({
symbol: order.symbol,
entryTime: position.entryTime,
entryPrice: position.avgPrice,
exitTime: this.currentTime,
exitPrice: fillPrice,
quantity: order.quantity,
pnl: pnl,
pnlPercent: (pnl / (position.avgPrice * order.quantity)) * 100
});
// Update statistics
if (pnl > 0) {
this.winningTrades++;
this.totalProfits += pnl;
} else if (pnl < 0) {
this.losingTrades++;
this.totalLosses -= pnl; // Make positive for easier calculations
} else {
this.breakEvenTrades++;
}
}
}
// Mark order as filled
// NOTE(review): fillPrice/fillTime only exist on BacktestOrder — confirm
// this parameter should be typed BacktestOrder rather than Order.
order.status = 'FILLED';
order.fillPrice = fillPrice;
order.fillTime = this.currentTime;
this.filledOrders.push(order);
// Notify strategy
await this.strategy.onOrderFilled(order);
this.emit('orderFilled', { order });
}
/**
 * Mark the current symbol's position to market and refresh high-water
 * mark / drawdown bookkeeping from the total portfolio value.
 */
private updatePositions(currentBar: BarData): void {
// Update the unrealized P&L for positions in this symbol
const position = this.positions.get(currentBar.symbol);
if (position) {
const currentPrice = currentBar.close;
const unrealizedPnL = (currentPrice - position.avgPrice) * position.quantity;
position.unrealizedPnL = unrealizedPnL;
}
// Calculate total portfolio value
const portfolioValue = this.calculatePortfolioValue();
// Check for new high water mark
if (portfolioValue > this.highWaterMark) {
this.highWaterMark = portfolioValue;
this.drawdownStartTime = null;
}
// Check for drawdown
if (this.drawdownStartTime === null && portfolioValue < this.highWaterMark) {
this.drawdownStartTime = this.currentTime;
}
// Update max drawdown
if (this.highWaterMark > 0) {
const currentDrawdown = (this.highWaterMark - portfolioValue) / this.highWaterMark;
if (currentDrawdown > this.maxDrawdown) {
this.maxDrawdown = currentDrawdown;
// Calculate drawdown duration
if (this.drawdownStartTime !== null) {
const drawdownDuration = (this.currentTime.getTime() - this.drawdownStartTime.getTime()) / (1000 * 60 * 60 * 24); // In days
if (drawdownDuration > this.maxDrawdownDuration) {
this.maxDrawdownDuration = drawdownDuration;
}
}
}
}
this.previousPortfolioValue = portfolioValue;
}
/**
 * Cash plus the marked value of all open positions.
 * NOTE(review): positions are valued at the LAST bar of the loaded
 * history, not the bar at currentTime — mid-run valuations therefore use
 * future prices (look-ahead bias in progress/drawdown figures).
 */
private calculatePortfolioValue(): number {
let totalValue = this.currentCapital;
// Add the current value of all positions
for (const [symbol, position] of this.positions.entries()) {
// Find the latest price for this symbol
const bars = this.marketData.get(symbol);
if (bars && bars.length > 0) {
const latestBar = bars[bars.length - 1];
totalValue += position.quantity * latestBar.close;
} else {
// If no price data, use the average price (not ideal but better than nothing)
totalValue += position.quantity * position.avgPrice;
}
}
return totalValue;
}
/**
 * Append one {date, return} entry for the day just ended.
 * Keep BacktestResult['dailyReturns'] element type in sync with this shape.
 */
private calculateDailyReturn(): void {
const portfolioValue = this.calculatePortfolioValue();
const dailyReturn = (portfolioValue - this.previousPortfolioValue) / this.previousPortfolioValue;
this.dailyReturns.push({
date: new Date(this.currentTime),
return: dailyReturn
});
this.previousPortfolioValue = portfolioValue;
}
/**
 * Liquidate every open position at its last available close.
 * NOTE(review): no commission is charged on these closing fills.
 */
private async closeAllPositions(): Promise<void> {
for (const [symbol, position] of this.positions.entries()) {
// Find the latest price
const bars = this.marketData.get(symbol);
if (!bars || bars.length === 0) continue;
const lastBar = bars[bars.length - 1];
const closePrice = lastBar.close;
// Calculate P&L
const pnl = (closePrice - position.avgPrice) * position.quantity;
// Update cash
this.currentCapital += position.quantity * closePrice;
// Record the trade
this.trades.push({
symbol,
entryTime: position.entryTime,
entryPrice: position.avgPrice,
exitTime: this.currentTime,
exitPrice: closePrice,
quantity: position.quantity,
pnl,
pnlPercent: (pnl / (position.avgPrice * position.quantity)) * 100
});
// Update statistics
if (pnl > 0) {
this.winningTrades++;
this.totalProfits += pnl;
} else if (pnl < 0) {
this.losingTrades++;
this.totalLosses -= pnl; // Make positive for easier calculations
} else {
this.breakEvenTrades++;
}
}
// Clear positions
this.positions.clear();
}
/** Emit a 'progress' event; `progress` is a 0-1 fraction of the timeline. */
private updateProgress(progress: number): void {
const currentPortfolioValue = this.calculatePortfolioValue();
const currentDrawdown = this.highWaterMark > 0
? (this.highWaterMark - currentPortfolioValue) / this.highWaterMark
: 0;
const elapsedMs = Date.now() - this.startTime;
const totalEstimatedMs = elapsedMs / progress;
const remainingMs = totalEstimatedMs - elapsedMs;
this.emit('progress', {
progress: progress * 100,
currentDate: this.currentTime,
processingSpeed: this.processedBars / (elapsedMs / 1000),
estimatedTimeRemaining: remainingMs,
currentCapital: this.currentCapital,
currentReturn: (currentPortfolioValue - this.initialCapital) / this.initialCapital,
currentDrawdown
} as BacktestProgress);
}
/**
 * Aggregate final statistics (returns, Sharpe, drawdown, trade stats).
 * Keep the BacktestResult type in sync with the object literal below.
 */
private generateResults(): BacktestResult {
const currentPortfolioValue = this.calculatePortfolioValue();
const totalReturn = (currentPortfolioValue - this.initialCapital) / this.initialCapital;
// Calculate annualized return
const days = (this.config.endDate.getTime() - this.config.startDate.getTime()) / (1000 * 60 * 60 * 24);
const annualizedReturn = Math.pow(1 + totalReturn, 365 / days) - 1;
// Calculate Sharpe Ratio
let sharpeRatio = 0;
if (this.dailyReturns.length > 1) {
const dailyReturnValues = this.dailyReturns.map(dr => dr.return);
const avgDailyReturn = dailyReturnValues.reduce((sum, ret) => sum + ret, 0) / dailyReturnValues.length;
const stdDev = Math.sqrt(
dailyReturnValues.reduce((sum, ret) => sum + Math.pow(ret - avgDailyReturn, 2), 0) / dailyReturnValues.length
);
// Annualize
sharpeRatio = stdDev > 0
? (avgDailyReturn * 252) / (stdDev * Math.sqrt(252))
: 0;
}
// Calculate win rate and profit factor
const totalTrades = this.winningTrades + this.losingTrades + this.breakEvenTrades;
const winRate = totalTrades > 0 ? this.winningTrades / totalTrades : 0;
const profitFactor = this.totalLosses > 0 ? this.totalProfits / this.totalLosses : (this.totalProfits > 0 ? Infinity : 0);
// Calculate average winning and losing trade
const avgWinningTrade = this.winningTrades > 0 ? this.totalProfits / this.winningTrades : 0;
const avgLosingTrade = this.losingTrades > 0 ? this.totalLosses / this.losingTrades : 0;
return {
strategyId: this.strategy.id,
startDate: this.config.startDate,
endDate: this.config.endDate,
duration: Date.now() - this.startTime,
initialCapital: this.initialCapital,
finalCapital: currentPortfolioValue,
totalReturn,
annualizedReturn,
sharpeRatio,
maxDrawdown: this.maxDrawdown,
maxDrawdownDuration: this.maxDrawdownDuration,
winRate,
totalTrades,
winningTrades: this.winningTrades,
losingTrades: this.losingTrades,
averageWinningTrade: avgWinningTrade,
averageLosingTrade: avgLosingTrade,
profitFactor,
dailyReturns: this.dailyReturns,
trades: this.trades
};
}
}

View file

@ -1,186 +0,0 @@
import { BaseStrategy } from '../Strategy';
import { BacktestConfig, BacktestEngine, BacktestResult } from './BacktestEngine';
import { MarketDataFeed } from './MarketDataFeed';
import { StrategyRegistry, StrategyType } from '../strategies/StrategyRegistry';
/** Parameters accepted by BacktestService.runBacktest(). */
export interface BacktestRequest {
// Registry key selecting which strategy class to instantiate.
strategyType: StrategyType;
// Strategy-specific parameters passed through to the registry.
strategyParams: Record<string, any>;
symbols: string[];
// Date objects or ISO strings; strings are parsed by the service.
startDate: Date | string;
endDate: Date | string;
initialCapital: number;
dataResolution: '1m' | '5m' | '15m' | '30m' | '1h' | '4h' | '1d';
// Commission as a fraction of order value (engine multiplies by order value).
commission: number;
// Slippage fraction applied to market-order fills.
slippage: number;
// Execution style; the engine currently always runs event-based.
mode: 'event' | 'vector';
}
/**
 * Backtesting Service
 *
 * Handles backtesting requests: builds strategies from the registry, runs
 * BacktestEngine sessions over a MarketDataFeed, and supports grid-search
 * parameter optimization across multiple runs.
 */
export class BacktestService {
private readonly strategyRegistry: StrategyRegistry;
private readonly dataFeed: MarketDataFeed;
// Engines for in-flight backtests, keyed by the generated strategy id.
private readonly activeBacktests: Map<string, BacktestEngine> = new Map();
constructor(apiBaseUrl: string = 'http://localhost:3001/api') {
this.strategyRegistry = StrategyRegistry.getInstance();
this.dataFeed = new MarketDataFeed(apiBaseUrl);
}
/**
 * Run a backtest based on a request.
 * Instantiates the strategy, runs the engine to completion, and returns
 * the aggregate result; the session entry is removed in both the success
 * and failure paths.
 * @throws Re-throws any engine failure.
 */
async runBacktest(request: BacktestRequest): Promise<BacktestResult> {
// Create a strategy instance
const strategyId = `backtest_${Date.now()}`;
const strategy = this.strategyRegistry.createStrategy(
request.strategyType,
strategyId,
`Backtest ${request.strategyType}`,
`Generated backtest for ${request.symbols.join(', ')}`,
request.symbols,
request.strategyParams
);
// Parse dates if they are strings
const startDate = typeof request.startDate === 'string'
? new Date(request.startDate)
: request.startDate;
const endDate = typeof request.endDate === 'string'
? new Date(request.endDate)
: request.endDate;
// Create backtest configuration
const config: BacktestConfig = {
startDate,
endDate,
symbols: request.symbols,
initialCapital: request.initialCapital,
commission: request.commission,
slippage: request.slippage,
dataResolution: request.dataResolution,
mode: request.mode
};
// Create and run the backtest engine
const engine = new BacktestEngine(strategy, config, this.dataFeed);
this.activeBacktests.set(strategyId, engine);
try {
// Set up event forwarding
// NOTE(review): "forwarding" only logs to the console; events are not
// re-emitted to callers of this service.
const forwardEvents = (eventName: string) => {
engine.on(eventName, (data) => {
console.log(`[Backtest ${strategyId}] ${eventName}:`, data);
});
};
forwardEvents('started');
forwardEvents('loading');
forwardEvents('loaded');
forwardEvents('progress');
forwardEvents('orderFilled');
forwardEvents('orderRejected');
forwardEvents('completed');
forwardEvents('error');
// Run the backtest
const result = await engine.run();
// Clean up
this.activeBacktests.delete(strategyId);
return result;
} catch (error) {
this.activeBacktests.delete(strategyId);
throw error;
}
}
/**
 * Optimize a strategy by running multiple backtests with different parameters.
 * Runs the grid sequentially; failed combinations are logged and skipped.
 * @returns Results (with their parameter sets) sorted by Sharpe ratio, best first.
 */
async optimizeStrategy(
baseRequest: BacktestRequest,
parameterGrid: Record<string, any[]>
): Promise<Array<BacktestResult & { parameters: Record<string, any> }>> {
const results: Array<BacktestResult & { parameters: Record<string, any> }> = [];
// Generate parameter combinations
const paramKeys = Object.keys(parameterGrid);
const combinations = this.generateParameterCombinations(parameterGrid, paramKeys);
// Run backtest for each combination
for (const paramSet of combinations) {
const request = {
...baseRequest,
strategyParams: {
...baseRequest.strategyParams,
...paramSet
}
};
try {
const result = await this.runBacktest(request);
results.push({
...result,
parameters: paramSet
});
} catch (error) {
console.error(`Optimization failed for parameters:`, paramSet, error);
}
}
// Sort by performance metric (e.g. Sharpe ratio)
return results.sort((a, b) => b.sharpeRatio - a.sharpeRatio);
}
/**
 * Generate all combinations of parameters for grid search.
 * Recursive cartesian product; `current` is mutated in place, which is
 * safe because each key is overwritten before the snapshot is pushed.
 */
private generateParameterCombinations(
grid: Record<string, any[]>,
keys: string[],
current: Record<string, any> = {},
index: number = 0,
result: Record<string, any>[] = []
): Record<string, any>[] {
if (index === keys.length) {
result.push({ ...current });
return result;
}
const key = keys[index];
const values = grid[key];
for (const value of values) {
current[key] = value;
this.generateParameterCombinations(grid, keys, current, index + 1, result);
}
return result;
}
/**
 * Get an active backtest engine by ID.
 */
getBacktestEngine(id: string): BacktestEngine | undefined {
return this.activeBacktests.get(id);
}
/**
 * Cancel a running backtest.
 * NOTE(review): this does not stop the engine — it only drops the
 * reference, so the simulation keeps running to completion.
 * @returns false when no backtest with that id is tracked.
 */
cancelBacktest(id: string): boolean {
const engine = this.activeBacktests.get(id);
if (!engine) return false;
// No explicit cancel method on engine, but we can clean up
this.activeBacktests.delete(id);
return true;
}
}

View file

@ -1,166 +0,0 @@
import { BarData } from '../Strategy';
import { DataFeed } from './BacktestEngine';
import axios from 'axios';
/**
 * DataFeed implementation backed by the market-data HTTP API, with an
 * unbounded in-memory cache and a synthetic-data fallback when the API
 * cannot be reached.
 */
export class MarketDataFeed implements DataFeed {
private readonly apiBaseUrl: string;
// Unbounded cache keyed by symbol/resolution/window — grows for the
// lifetime of the instance unless clearCache() is called.
private cache: Map<string, BarData[]> = new Map();
constructor(apiBaseUrl: string = 'http://localhost:3001/api') {
this.apiBaseUrl = apiBaseUrl;
}
/**
 * Fetch historical bars for a symbol/resolution/window, serving from
 * cache when possible.
 * NOTE(review): on any API failure this silently falls back to randomly
 * generated synthetic data, which can mask outages and invalidate
 * backtest results — consider surfacing the error instead.
 */
async getHistoricalData(symbol: string, resolution: string, start: Date, end: Date): Promise<BarData[]> {
const cacheKey = this.getCacheKey(symbol, resolution, start, end);
// Check cache first
if (this.cache.has(cacheKey)) {
return this.cache.get(cacheKey)!;
}
try {
// Format dates for API request
const startStr = start.toISOString();
const endStr = end.toISOString();
const response = await axios.get(`${this.apiBaseUrl}/market-data/history`, {
params: {
symbol,
resolution,
start: startStr,
end: endStr
}
});
if (!response.data.success || !response.data.data) {
throw new Error(`Failed to fetch historical data for ${symbol}`);
}
// Transform API response to BarData objects
const bars: BarData[] = response.data.data.map((bar: any) => ({
symbol,
timestamp: new Date(bar.timestamp),
open: bar.open,
high: bar.high,
low: bar.low,
close: bar.close,
volume: bar.volume
}));
// Cache the result
this.cache.set(cacheKey, bars);
return bars;
} catch (error) {
console.error(`Error fetching historical data for ${symbol}:`, error);
// Return fallback test data if API call fails
return this.generateFallbackTestData(symbol, resolution, start, end);
}
}
/**
 * Ask the API whether bars exist for the requested window.
 * NOTE(review): returns true when the availability check itself fails, so
 * callers cannot distinguish "data present" from "API unreachable".
 */
async hasDataFor(symbol: string, resolution: string, start: Date, end: Date): Promise<boolean> {
try {
const startStr = start.toISOString();
const endStr = end.toISOString();
const response = await axios.get(`${this.apiBaseUrl}/market-data/available`, {
params: {
symbol,
resolution,
start: startStr,
end: endStr
}
});
return response.data.success && response.data.data.available;
} catch (error) {
console.error(`Error checking data availability for ${symbol}:`, error);
// Assume data is available for test purposes
return true;
}
}
/** Drop all cached bar series. */
clearCache(): void {
this.cache.clear();
}
/** Build the cache key for a symbol/resolution/window combination. */
private getCacheKey(symbol: string, resolution: string, start: Date, end: Date): string {
return `${symbol}_${resolution}_${start.getTime()}_${end.getTime()}`;
}
/**
 * Produce random-walk OHLCV bars for the window (weekdays only), used
 * only as a fallback when the API is unavailable. Non-deterministic.
 */
private generateFallbackTestData(symbol: string, resolution: string, start: Date, end: Date): BarData[] {
console.warn(`Generating fallback test data for ${symbol} from ${start} to ${end}`);
const bars: BarData[] = [];
let current = new Date(start);
let basePrice = this.getBasePrice(symbol);
// Generate daily bars by default
const interval = this.getIntervalFromResolution(resolution);
while (current.getTime() <= end.getTime()) {
// Only generate bars for trading days (skip weekends)
if (current.getDay() !== 0 && current.getDay() !== 6) {
// Generate a random daily price movement (-1% to +1%)
const dailyChange = (Math.random() * 2 - 1) / 100;
// Add some randomness to the volatility
const volatility = 0.005 + Math.random() * 0.01; // 0.5% to 1.5%
const open = basePrice * (1 + (Math.random() * 0.002 - 0.001));
const close = open * (1 + dailyChange);
const high = Math.max(open, close) * (1 + Math.random() * volatility);
const low = Math.min(open, close) * (1 - Math.random() * volatility);
const volume = Math.floor(100000 + Math.random() * 900000);
bars.push({
symbol,
timestamp: new Date(current),
open,
high,
low,
close,
volume
});
// Update base price for next bar
basePrice = close;
}
// Move to next interval
current = new Date(current.getTime() + interval);
}
return bars;
}
/** Randomized but realistic starting price for well-known tickers. */
private getBasePrice(symbol: string): number {
// Return a realistic base price for common symbols
switch (symbol.toUpperCase()) {
case 'AAPL': return 170 + Math.random() * 30;
case 'MSFT': return 370 + Math.random() * 50;
case 'AMZN': return 140 + Math.random() * 20;
case 'GOOGL': return 130 + Math.random() * 20;
case 'META': return 300 + Math.random() * 50;
case 'TSLA': return 180 + Math.random() * 70;
case 'NVDA': return 700 + Math.random() * 200;
case 'SPY': return 450 + Math.random() * 30;
case 'QQQ': return 370 + Math.random() * 40;
default: return 100 + Math.random() * 50;
}
}
/** Map a resolution string to its bar interval in milliseconds. */
private getIntervalFromResolution(resolution: string): number {
// Return milliseconds for each resolution
switch (resolution) {
case '1m': return 60 * 1000;
case '5m': return 5 * 60 * 1000;
case '15m': return 15 * 60 * 1000;
case '30m': return 30 * 60 * 1000;
case '1h': return 60 * 60 * 1000;
case '4h': return 4 * 60 * 60 * 1000;
case '1d': return 24 * 60 * 60 * 1000;
default: return 24 * 60 * 60 * 1000; // Default to daily
}
}
}

View file

@ -1,325 +0,0 @@
import { BacktestResult } from './BacktestEngine';
/**
* Performance Analysis Utilities
*
* Provides additional metrics and analysis tools for backtesting results.
*/
export class PerformanceAnalytics {
/**
 * Return a copy of `result` augmented with advanced risk/return metrics
 * (Sortino, Calmar, Omega, CAGR, volatility, Ulcer index).
 */
static enhanceResults(result: BacktestResult): BacktestResult {
  const extras = this.calculateAdvancedMetrics(result);
  return { ...result, ...extras };
}
/**
 * Derive advanced metrics from a completed backtest: Sortino, Calmar and
 * Omega ratios, CAGR, annualized volatility and the Ulcer index.
 */
private static calculateAdvancedMetrics(result: BacktestResult): Partial<BacktestResult> {
  const returnsSeries = result.dailyReturns.map((dr) => dr.return);
  const yearsElapsed =
    (result.endDate.getTime() - result.startDate.getTime()) / (365 * 24 * 60 * 60 * 1000);
  return {
    sortinoRatio: this.calculateSortinoRatio(returnsSeries),
    // Calmar: annualized return per unit of maximum drawdown.
    calmarRatio: result.maxDrawdown > 0 ? result.annualizedReturn / result.maxDrawdown : Infinity,
    omegaRatio: this.calculateOmegaRatio(returnsSeries),
    // Compound annual growth rate over the backtest window.
    cagr: Math.pow(result.finalCapital / result.initialCapital, 1 / yearsElapsed) - 1,
    volatility: this.calculateVolatility(returnsSeries),
    ulcerIndex: this.calculateUlcerIndex(result.dailyReturns),
  };
}
/**
 * Sortino ratio: annualized mean return divided by annualized downside
 * deviation. Only negative daily returns contribute to the risk term;
 * returns Infinity when there are no losing days, 0 for an empty series.
 */
private static calculateSortinoRatio(dailyReturns: number[]): number {
  const count = dailyReturns.length;
  if (count === 0) return 0;
  let total = 0;
  for (const r of dailyReturns) total += r;
  const mean = total / count;
  const downside = dailyReturns.filter((r) => r < 0);
  if (downside.length === 0) return Infinity;
  let squaredSum = 0;
  for (const r of downside) squaredSum += r * r;
  const downsideDeviation = Math.sqrt(squaredSum / downside.length);
  // Annualize both the return (×252) and the deviation (×√252).
  const annualReturn = mean * 252;
  const annualDownsideDeviation = downsideDeviation * Math.sqrt(252);
  return annualDownsideDeviation > 0 ? annualReturn / annualDownsideDeviation : 0;
}
/**
 * Omega ratio: probability-weighted gains above `threshold` divided by
 * losses below it. Returns Infinity with no losses, 0 for an empty series.
 */
private static calculateOmegaRatio(dailyReturns: number[], threshold = 0): number {
  if (dailyReturns.length === 0) return 0;
  const gains = dailyReturns.reduce(
    (acc, r) => (r > threshold ? acc + (r - threshold) : acc),
    0
  );
  const losses = dailyReturns.reduce(
    (acc, r) => (r > threshold ? acc : acc + (threshold - r)),
    0
  );
  return losses > 0 ? gains / losses : Infinity;
}
/**
 * Annualized volatility: population standard deviation of daily returns
 * scaled by √252. Returns 0 for fewer than two observations.
 */
private static calculateVolatility(returns: number[]): number {
  if (returns.length < 2) return 0;
  const mean = returns.reduce((sum, r) => sum + r, 0) / returns.length;
  const sumSquares = returns.reduce((sum, r) => sum + (r - mean) ** 2, 0);
  // variance × 252 under the square root annualizes the daily deviation.
  return Math.sqrt((sumSquares / returns.length) * 252);
}
/**
 * Ulcer Index: root-mean-square of percentage drawdowns from the running
 * peak of the compounded equity curve. Returns 0 for an empty series.
 */
private static calculateUlcerIndex(dailyReturns: Array<{ date: Date; return: number }>): number {
  if (dailyReturns.length === 0) return 0;
  // Compound the daily returns into an equity curve starting at 1.
  let equity = 1;
  const curve: number[] = [];
  for (const dr of dailyReturns) {
    equity *= 1 + dr.return;
    curve.push(equity);
  }
  // Accumulate squared percentage drawdowns from the running maximum.
  let peak = curve[0];
  let squaredDrawdowns = 0;
  for (const value of curve) {
    peak = Math.max(peak, value);
    const drawdown = (peak - value) / peak;
    squaredDrawdowns += drawdown * drawdown;
  }
  return Math.sqrt(squaredDrawdowns / curve.length);
}
/**
* Extract monthly returns from daily returns
*/
static calculateMonthlyReturns(dailyReturns: Array<{ date: Date; return: number }>): Array<{
year: number;
month: number;
return: number;
}> {
const monthlyReturns: Array<{ year: number; month: number; return: number }> = [];
if (dailyReturns.length === 0) return monthlyReturns;
// Group returns by year and month
const groupedReturns: Record<string, number[]> = {};
for (const dr of dailyReturns) {
const year = dr.date.getFullYear();
const month = dr.date.getMonth();
const key = `${year}-${month}`;
if (!groupedReturns[key]) {
groupedReturns[key] = [];
}
groupedReturns[key].push(dr.return);
}
// Calculate compound return for each month
for (const key in groupedReturns) {
const [yearStr, monthStr] = key.split('-');
const year = parseInt(yearStr);
const month = parseInt(monthStr);
// Compound the daily returns for the month
const monthReturn = groupedReturns[key].reduce(
(product, ret) => product * (1 + ret), 1
) - 1;
monthlyReturns.push({ year, month, return: monthReturn });
}
// Sort by date
return monthlyReturns.sort((a, b) => {
if (a.year !== b.year) return a.year - b.year;
return a.month - b.month;
});
}
/**
 * Create drawdown analysis from the equity curve implied by daily returns.
 *
 * A drawdown opens when equity falls below the running peak; `endDate` is
 * the date of the lowest equity reached and `recoveryDate` is the first
 * date a new peak is set. A drawdown still open at the end of the series
 * is reported with `recoveryDate`/`recoveryDays` of null. Results are
 * sorted by drawdown magnitude, largest first.
 */
static analyzeDrawdowns(dailyReturns: Array<{ date: Date; return: number }>): Array<{
  startDate: Date;
  endDate: Date;
  recoveryDate: Date | null;
  drawdown: number;
  durationDays: number;
  recoveryDays: number | null;
}> {
  if (dailyReturns.length === 0) return [];
  const MS_PER_DAY = 1000 * 60 * 60 * 24;
  // Calculate equity curve (normalized to a starting equity of 1).
  let equity = 1;
  const equityCurve = dailyReturns.map(dr => {
    equity *= (1 + dr.return);
    return { date: dr.date, equity };
  });
  const drawdowns: Array<{
    startDate: Date;
    endDate: Date;
    recoveryDate: Date | null;
    drawdown: number;
    durationDays: number;
    recoveryDays: number | null;
  }> = [];
  let peakEquity = equityCurve[0].equity;
  let peakDate = equityCurve[0].date;
  let inDrawdown = false;
  let currentDrawdown: {
    startDate: Date;
    endDate: Date;
    lowEquity: number;
    peakEquity: number;
  } | null = null;
  // Find drawdown periods.
  for (let i = 1; i < equityCurve.length; i++) {
    const { date, equity } = equityCurve[i];
    if (equity > peakEquity) {
      // New peak: any open drawdown has just recovered.
      peakEquity = equity;
      peakDate = date;
      if (inDrawdown && currentDrawdown) {
        const recoveryDate = date;
        const drawdownPct = (currentDrawdown.peakEquity - currentDrawdown.lowEquity) /
          currentDrawdown.peakEquity;
        const durationDays = Math.floor(
          (currentDrawdown.endDate.getTime() - currentDrawdown.startDate.getTime()) / MS_PER_DAY
        );
        const recoveryDays = Math.floor(
          (recoveryDate.getTime() - currentDrawdown.endDate.getTime()) / MS_PER_DAY
        );
        drawdowns.push({
          startDate: currentDrawdown.startDate,
          endDate: currentDrawdown.endDate,
          recoveryDate,
          drawdown: drawdownPct,
          durationDays,
          recoveryDays
        });
        inDrawdown = false;
        currentDrawdown = null;
      }
    } else {
      // Below the peak: open a new drawdown or deepen the current one.
      // (A per-iteration drawdown % was previously computed here but never
      // read; the final percentage is derived from stored equities above.)
      if (!inDrawdown) {
        // Start of a new drawdown — it begins at the prior peak's date.
        inDrawdown = true;
        currentDrawdown = {
          startDate: peakDate,
          endDate: date,
          lowEquity: equity,
          peakEquity
        };
      } else if (currentDrawdown && equity < currentDrawdown.lowEquity) {
        // New low within the current drawdown.
        currentDrawdown.lowEquity = equity;
        currentDrawdown.endDate = date;
      }
    }
  }
  // Handle any drawdown still open at the end of the series.
  if (inDrawdown && currentDrawdown) {
    const drawdownPct = (currentDrawdown.peakEquity - currentDrawdown.lowEquity) /
      currentDrawdown.peakEquity;
    const durationDays = Math.floor(
      (currentDrawdown.endDate.getTime() - currentDrawdown.startDate.getTime()) / MS_PER_DAY
    );
    drawdowns.push({
      startDate: currentDrawdown.startDate,
      endDate: currentDrawdown.endDate,
      recoveryDate: null,
      drawdown: drawdownPct,
      durationDays,
      recoveryDays: null
    });
  }
  // Sort by drawdown magnitude.
  return drawdowns.sort((a, b) => b.drawdown - a.drawdown);
}
}

View file

@ -1,12 +0,0 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"module": "ESNext",
"moduleResolution": "bundler",
"types": ["bun-types"]
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View file

@ -1,22 +0,0 @@
{
"name": "signal-engine",
"version": "1.0.0",
"description": "Real-time signal generation and processing engine",
"main": "src/index.ts",
"scripts": {
"dev": "bun run --watch src/index.ts",
"start": "bun run src/index.ts",
"test": "bun test --timeout 10000 src/tests/**/*.test.ts",
"test:watch": "bun test --watch src/tests/**/*.test.ts"
},
"dependencies": {
"hono": "^4.6.3",
"@stock-bot/config": "*",
"@stock-bot/types": "*",
"ws": "^8.18.0"
},
"devDependencies": {
"bun-types": "^1.2.15",
"@types/ws": "^8.5.12"
}
}

View file

@ -1,12 +0,0 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"module": "ESNext",
"moduleResolution": "bundler",
"types": ["bun-types"]
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

View file

@ -1,24 +0,0 @@
{
"name": "strategy-orchestrator",
"version": "1.0.0",
"description": "Trading strategy lifecycle management service",
"main": "src/index.ts",
"scripts": {
"dev": "bun run --watch src/index.ts",
"start": "bun run src/index.ts",
"test": "bun test --timeout 10000 src/tests/**/*.test.ts",
"test:watch": "bun test --watch src/tests/**/*.test.ts"
},
"dependencies": {
"hono": "^4.6.3",
"ioredis": "^5.4.1",
"@stock-bot/config": "*",
"@stock-bot/types": "*",
"ws": "^8.18.0",
"node-cron": "^3.0.3",
"axios": "^1.6.2"
},
"devDependencies": {
"bun-types": "^1.2.15",
"@types/ws": "^8.5.12",
"@types/node-cron": "^3.0.11"
}
}

View file

@ -1,266 +0,0 @@
import { Request, Response } from 'express';
import { StrategyRegistry, StrategyType } from '../core/strategies/StrategyRegistry';
import { BacktestRequest, BacktestService } from '../core/backtesting/BacktestService';
import { BaseStrategy } from '../core/Strategy';
import { PerformanceAnalytics } from '../core/backtesting/PerformanceAnalytics';
/**
* Strategy Controller
*
* Handles HTTP requests related to strategy management, backtesting, and execution.
*/
export class StrategyController {
private readonly strategyRegistry: StrategyRegistry;
private readonly backtestService: BacktestService;
constructor(apiBaseUrl: string = 'http://localhost:3001/api') {
this.strategyRegistry = StrategyRegistry.getInstance();
this.backtestService = new BacktestService(apiBaseUrl);
}
/**
* Get all available strategy types
*/
public getStrategyTypes(req: Request, res: Response): void {
const types = Object.values(StrategyType);
res.json({
success: true,
data: types
});
}
/**
* Get all strategies
*/
public getStrategies(req: Request, res: Response): void {
const strategies = this.strategyRegistry.getAllStrategies();
// Convert to array of plain objects for serialization
const serializedStrategies = strategies.map(strategy => ({
id: strategy.id,
name: strategy.name,
description: strategy.description,
symbols: strategy.symbols,
parameters: strategy.parameters,
type: this.strategyRegistry.getStrategyType(strategy)
}));
res.json({
success: true,
data: serializedStrategies
});
}
/**
* Get a specific strategy by ID
*/
public getStrategy(req: Request, res: Response): void {
const { id } = req.params;
const strategy = this.strategyRegistry.getStrategyById(id);
if (!strategy) {
res.status(404).json({
success: false,
error: `Strategy with ID ${id} not found`
});
return;
}
const type = this.strategyRegistry.getStrategyType(strategy);
res.json({
success: true,
data: {
id: strategy.id,
name: strategy.name,
description: strategy.description,
symbols: strategy.symbols,
parameters: strategy.parameters,
type
}
});
}
/**
* Create a new strategy
*/
public createStrategy(req: Request, res: Response): void {
try {
const { name, description, symbols, parameters, type } = req.body;
if (!type || !Object.values(StrategyType).includes(type)) {
res.status(400).json({
success: false,
error: 'Invalid strategy type'
});
return;
}
const strategy = this.strategyRegistry.createStrategy(
type as StrategyType,
`strategy_${Date.now()}`, // Generate an ID
name || `New ${type} Strategy`,
description || `Generated ${type} strategy`,
symbols || [],
parameters || {}
);
res.status(201).json({
success: true,
data: {
id: strategy.id,
name: strategy.name,
description: strategy.description,
symbols: strategy.symbols,
parameters: strategy.parameters,
type
}
});
} catch (error) {
res.status(500).json({
success: false,
error: (error as Error).message
});
}
}
/**
* Update an existing strategy
*/
public updateStrategy(req: Request, res: Response): void {
const { id } = req.params;
const { name, description, symbols, parameters } = req.body;
const strategy = this.strategyRegistry.getStrategyById(id);
if (!strategy) {
res.status(404).json({
success: false,
error: `Strategy with ID ${id} not found`
});
return;
}
// Update properties
if (name !== undefined) strategy.name = name;
if (description !== undefined) strategy.description = description;
if (symbols !== undefined) strategy.symbols = symbols;
if (parameters !== undefined) strategy.parameters = parameters;
res.json({
success: true,
data: {
id: strategy.id,
name: strategy.name,
description: strategy.description,
symbols: strategy.symbols,
parameters: strategy.parameters,
type: this.strategyRegistry.getStrategyType(strategy)
}
});
}
/**
* Delete a strategy
*/
public deleteStrategy(req: Request, res: Response): void {
const { id } = req.params;
const success = this.strategyRegistry.deleteStrategy(id);
if (!success) {
res.status(404).json({
success: false,
error: `Strategy with ID ${id} not found`
});
return;
}
res.json({
success: true,
data: { id }
});
}
/**
* Run a backtest
*/
public async runBacktest(req: Request, res: Response): Promise<void> {
try {
const backtestRequest: BacktestRequest = req.body;
// Validate request
if (!backtestRequest.strategyType) {
res.status(400).json({
success: false,
error: 'Strategy type is required'
});
return;
}
if (!backtestRequest.symbols || backtestRequest.symbols.length === 0) {
res.status(400).json({
success: false,
error: 'At least one symbol is required'
});
return;
}
// Run the backtest
const result = await this.backtestService.runBacktest(backtestRequest);
// Enhance results with additional metrics
const enhancedResult = PerformanceAnalytics.enhanceResults(result);
// Calculate additional analytics
const monthlyReturns = PerformanceAnalytics.calculateMonthlyReturns(result.dailyReturns);
const drawdowns = PerformanceAnalytics.analyzeDrawdowns(result.dailyReturns);
res.json({
success: true,
data: {
...enhancedResult,
monthlyReturns,
drawdowns
}
});
} catch (error) {
console.error('Backtest error:', error);
res.status(500).json({
success: false,
error: (error as Error).message
});
}
}
/**
* Optimize a strategy with grid search
*/
public async optimizeStrategy(req: Request, res: Response): Promise<void> {
try {
const { baseRequest, parameterGrid } = req.body;
// Validate request
if (!baseRequest || !parameterGrid) {
res.status(400).json({
success: false,
error: 'Base request and parameter grid are required'
});
return;
}
// Run optimization
const results = await this.backtestService.optimizeStrategy(baseRequest, parameterGrid);
res.json({
success: true,
data: results
});
} catch (error) {
res.status(500).json({
success: false,
error: (error as Error).message
});
}
}
}
export default StrategyController;

View file

@ -1,287 +0,0 @@
import { EventEmitter } from 'events';
/**
 * A single OHLCV bar for one symbol.
 */
export interface BarData {
  symbol: string;
  timestamp: Date;
  open: number;
  high: number;
  low: number;
  close: number;
  volume: number;
}
/**
 * An open holding in the portfolio. PnL fields are optional because they
 * are only meaningful once prices/fills are known.
 */
export interface Position {
  symbol: string;
  quantity: number;
  avgPrice: number;
  side: 'LONG' | 'SHORT';
  entryTime: Date;
  unrealizedPnL?: number;
  realizedPnL?: number;
}
/**
 * An order emitted by a strategy. Fill fields are populated once the order
 * executes.
 */
export interface Order {
  id: string;
  symbol: string;
  side: 'BUY' | 'SELL';
  quantity: number;
  price?: number; // Market order if undefined
  type: 'MARKET' | 'LIMIT' | 'STOP' | 'STOP_LIMIT';
  status: 'PENDING' | 'FILLED' | 'CANCELLED' | 'REJECTED';
  timestamp: Date;
  fillPrice?: number;
  fillTime?: Date;
}
/**
 * Mutable state a strategy sees while running: simulated clock, portfolio,
 * per-symbol bar history and a cache for computed indicator values.
 */
export interface StrategyContext {
  currentTime: Date;
  portfolio: {
    cash: number;
    positions: Map<string, Position>;
    totalValue: number;
  };
  marketData: Map<string, BarData[]>; // Historical data for each symbol
  indicators: Map<string, any>; // Cached indicator values
}
/**
 * Free-form strategy configuration bag; concrete strategies define the
 * keys they understand.
 */
export interface StrategyParameters {
  [key: string]: number | string | boolean | any[];
}
/**
 * Aggregate performance statistics for a strategy run.
 * NOTE(review): units are not established here — presumably returns are
 * fractional and the ratios annualized; confirm against the code that
 * populates this type.
 */
export interface StrategyMetrics {
  totalReturn: number;
  totalTrades: number;
  winningTrades: number;
  losingTrades: number;
  winRate: number;
  avgWin: number;
  avgLoss: number;
  profitFactor: number;
  sharpeRatio: number;
  maxDrawdown: number;
  maxDrawdownDuration: number;
  calmarRatio: number;
  sortinoRatio: number;
  beta: number;
  alpha: number;
  volatility: number;
}
/**
 * Abstract base class for trading strategies.
 *
 * Owns the strategy's runtime context (simulated clock, portfolio, per-symbol
 * bar history, indicator cache) and provides helpers for order creation and
 * portfolio bookkeeping. Subclasses implement the lifecycle hooks
 * initialize/onBar/onOrderFilled/cleanup. Emits 'started' and 'stopped'
 * events around the lifecycle.
 */
export abstract class BaseStrategy extends EventEmitter {
  /** Maximum bars retained per symbol, to bound memory usage. */
  private static readonly MAX_BARS = 1000;

  public readonly id: string;
  public name: string;
  public description: string;
  public symbols: string[];
  public parameters: StrategyParameters;
  protected context: StrategyContext;
  protected isInitialized: boolean = false;

  constructor(
    id: string,
    name: string,
    description: string,
    symbols: string[],
    parameters: StrategyParameters = {}
  ) {
    super();
    this.id = id;
    this.name = name;
    this.description = description;
    this.symbols = symbols;
    this.parameters = parameters;
    this.context = {
      currentTime: new Date(),
      portfolio: {
        cash: 100000, // Default starting capital
        positions: new Map(),
        totalValue: 100000
      },
      marketData: new Map(),
      indicators: new Map()
    };
  }

  // Abstract methods that must be implemented by strategy subclasses
  abstract initialize(): Promise<void>;
  abstract onBar(bar: BarData): Promise<Order[]>;
  abstract onOrderFilled(order: Order): Promise<void>;
  abstract cleanup(): Promise<void>;

  /** Initialize (at most once) and signal that the strategy is running. */
  async start(): Promise<void> {
    if (!this.isInitialized) {
      await this.initialize();
      this.isInitialized = true;
    }
    this.emit('started', { strategyId: this.id });
  }

  /** Run subclass cleanup and signal that the strategy has stopped. */
  async stop(): Promise<void> {
    await this.cleanup();
    this.emit('stopped', { strategyId: this.id });
  }

  /**
   * Append a bar to the symbol's history and advance the simulated clock.
   * History per symbol is capped at MAX_BARS (oldest bars are dropped).
   */
  addBar(bar: BarData): void {
    this.context.currentTime = bar.timestamp;
    if (!this.context.marketData.has(bar.symbol)) {
      this.context.marketData.set(bar.symbol, []);
    }
    const bars = this.context.marketData.get(bar.symbol)!;
    bars.push(bar);
    // Keep only the most recent bars to manage memory
    if (bars.length > BaseStrategy.MAX_BARS) {
      bars.shift();
    }
  }

  // --- Portfolio management helpers ---

  /** Latest close for the symbol, or null if no bars have been seen. */
  protected getCurrentPrice(symbol: string): number | null {
    const bars = this.context.marketData.get(symbol);
    return bars && bars.length > 0 ? bars[bars.length - 1].close : null;
  }

  protected getPosition(symbol: string): Position | null {
    return this.context.portfolio.positions.get(symbol) || null;
  }

  protected hasPosition(symbol: string): boolean {
    return this.context.portfolio.positions.has(symbol);
  }

  protected getAvailableCash(): number {
    return this.context.portfolio.cash;
  }

  /** Market value of the position, or 0 when position or price is unknown. */
  protected calculatePositionValue(symbol: string): number {
    const position = this.getPosition(symbol);
    const currentPrice = this.getCurrentPrice(symbol);
    if (!position || !currentPrice) return 0;
    return position.quantity * currentPrice;
  }

  /** Recompute total value = cash + positions marked to the latest close. */
  protected updatePortfolioValue(): void {
    let totalValue = this.context.portfolio.cash;
    for (const [symbol, position] of this.context.portfolio.positions) {
      const currentPrice = this.getCurrentPrice(symbol);
      if (currentPrice) {
        totalValue += position.quantity * currentPrice;
      }
    }
    this.context.portfolio.totalValue = totalValue;
  }

  // --- Order creation helpers ---

  /** Pending MARKET order stamped with the strategy's simulated clock. */
  protected createMarketOrder(symbol: string, side: 'BUY' | 'SELL', quantity: number): Order {
    return {
      id: this.generateOrderId(),
      symbol,
      side,
      quantity: Math.abs(quantity),
      type: 'MARKET',
      status: 'PENDING',
      timestamp: this.context.currentTime
    };
  }

  /** Pending LIMIT order at the given price. */
  protected createLimitOrder(
    symbol: string,
    side: 'BUY' | 'SELL',
    quantity: number,
    price: number
  ): Order {
    return {
      id: this.generateOrderId(),
      symbol,
      side,
      quantity: Math.abs(quantity),
      price,
      type: 'LIMIT',
      status: 'PENDING',
      timestamp: this.context.currentTime
    };
  }

  /** Pending STOP order; the stop price is carried in `price`. */
  protected createStopOrder(
    symbol: string,
    side: 'BUY' | 'SELL',
    quantity: number,
    stopPrice: number
  ): Order {
    return {
      id: this.generateOrderId(),
      symbol,
      side,
      quantity: Math.abs(quantity),
      price: stopPrice,
      type: 'STOP',
      status: 'PENDING',
      timestamp: this.context.currentTime
    };
  }

  /** Unique-enough order id: strategy id + timestamp + random suffix. */
  private generateOrderId(): string {
    // String#substr is deprecated; slice(2, 11) yields the same 9 chars.
    return `${this.id}_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
  }

  // --- Utility methods for common strategy patterns ---

  /** Last `periods` bars for the symbol (fewer if history is shorter). */
  protected getBarsSince(symbol: string, periods: number): BarData[] {
    const bars = this.context.marketData.get(symbol) || [];
    return bars.slice(-periods);
  }

  /** Simple close-to-close returns over the last `periods` intervals. */
  protected getReturns(symbol: string, periods: number): number[] {
    const bars = this.getBarsSince(symbol, periods + 1);
    const returns: number[] = [];
    for (let i = 1; i < bars.length; i++) {
      const returnPct = (bars[i].close - bars[i - 1].close) / bars[i - 1].close;
      returns.push(returnPct);
    }
    return returns;
  }

  /** Annualized (252-day) volatility of recent close-to-close returns. */
  protected getVolatility(symbol: string, periods: number): number {
    const returns = this.getReturns(symbol, periods);
    if (returns.length === 0) return 0;
    const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
    const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length;
    return Math.sqrt(variance * 252); // Annualized volatility
  }

  /** Override in subclasses to validate `parameters`; default accepts all. */
  protected validateParameters(): boolean {
    return true;
  }

  /** Snapshot of the strategy state for serialization/inspection. */
  getState() {
    return {
      id: this.id,
      name: this.name,
      description: this.description,
      symbols: this.symbols,
      parameters: this.parameters,
      isInitialized: this.isInitialized,
      currentTime: this.context.currentTime,
      portfolio: {
        cash: this.context.portfolio.cash,
        totalValue: this.context.portfolio.totalValue,
        positions: Array.from(this.context.portfolio.positions.entries())
      }
    };
  }
}

View file

@ -1,362 +0,0 @@
import { BarData } from '../Strategy';
/**
 * Stateless technical-indicator calculations over price/bar series.
 *
 * All methods are static and return arrays aligned 1:1 with their input,
 * padding the warm-up region with NaN so indices line up with the source
 * series.
 */
export class TechnicalIndicators {
  /**
   * Calculate Simple Moving Average (SMA)
   * @param prices Array of price values
   * @param period Number of periods for calculation
   * @returns Array of SMA values (first period-1 entries are NaN)
   */
  static sma(prices: number[], period: number): number[] {
    if (period <= 0 || prices.length === 0) return [];
    const result: number[] = [];
    // Not enough data for calculation
    if (prices.length < period) {
      return Array(prices.length).fill(NaN);
    }
    // Calculate first SMA
    let sum = 0;
    for (let i = 0; i < period; i++) {
      sum += prices[i];
    }
    result.push(sum / period);
    // Subsequent SMAs reuse the running sum (sliding window).
    for (let i = period; i < prices.length; i++) {
      sum = sum - prices[i - period] + prices[i];
      result.push(sum / period);
    }
    // Fill beginning with NaN to keep output aligned with input.
    const nanValues = Array(period - 1).fill(NaN);
    return [...nanValues, ...result];
  }

  /**
   * Calculate Exponential Moving Average (EMA)
   * @param prices Array of price values
   * @param period Number of periods for calculation
   * @returns Array of EMA values (first period-1 entries are NaN)
   */
  static ema(prices: number[], period: number): number[] {
    if (period <= 0 || prices.length === 0) return [];
    const result: number[] = [];
    const multiplier = 2 / (period + 1);
    // Not enough data for calculation
    if (prices.length < period) {
      return Array(prices.length).fill(NaN);
    }
    // Seed with the SMA of the first `period` prices.
    let sum = 0;
    for (let i = 0; i < period; i++) {
      sum += prices[i];
    }
    let ema = sum / period;
    result.push(ema);
    // Calculate subsequent EMAs
    for (let i = period; i < prices.length; i++) {
      ema = (prices[i] - ema) * multiplier + ema;
      result.push(ema);
    }
    // Fill beginning with NaN
    const nanValues = Array(period - 1).fill(NaN);
    return [...nanValues, ...result];
  }

  /**
   * Calculate Relative Strength Index (RSI) using Wilder's smoothing.
   *
   * When the average loss is zero (no losing periods in the window), RSI is
   * 100 by definition. A previous version divided by a 0.001 fudge factor
   * instead, which wrongly deflated RSI whenever the average gain was small.
   *
   * @param prices Array of price values
   * @param period Number of periods for calculation
   * @returns Array of RSI values (first `period` entries are NaN)
   */
  static rsi(prices: number[], period: number): number[] {
    if (period <= 0 || prices.length < period + 1) {
      return Array(prices.length).fill(NaN);
    }
    const result: number[] = [];
    const gains: number[] = [];
    const losses: number[] = [];
    // Calculate price changes
    for (let i = 1; i < prices.length; i++) {
      const change = prices[i] - prices[i - 1];
      gains.push(change > 0 ? change : 0);
      losses.push(change < 0 ? Math.abs(change) : 0);
    }
    // Not enough data
    if (gains.length < period) {
      return Array(prices.length).fill(NaN);
    }
    // First averages are simple means over the initial window.
    let avgGain = 0;
    let avgLoss = 0;
    for (let i = 0; i < period; i++) {
      avgGain += gains[i];
      avgLoss += losses[i];
    }
    avgGain /= period;
    avgLoss /= period;
    // First RSI; avgLoss === 0 means no downside, so RSI = 100.
    let rsi = avgLoss === 0 ? 100 : 100 - (100 / (1 + avgGain / avgLoss));
    result.push(rsi);
    // Subsequent RSIs with Wilder's smoothing of the averages.
    for (let i = period; i < gains.length; i++) {
      avgGain = ((avgGain * (period - 1)) + gains[i]) / period;
      avgLoss = ((avgLoss * (period - 1)) + losses[i]) / period;
      rsi = avgLoss === 0 ? 100 : 100 - (100 / (1 + avgGain / avgLoss));
      result.push(rsi);
    }
    // Fill beginning with NaN
    const nanValues = Array(period).fill(NaN);
    return [...nanValues, ...result];
  }

  /**
   * Calculate Moving Average Convergence Divergence (MACD)
   * @param prices Array of price values
   * @param fastPeriod Fast EMA period (default: 12)
   * @param slowPeriod Slow EMA period (default: 26)
   * @param signalPeriod Signal line period (default: 9)
   * @returns Object containing MACD line, signal line, and histogram,
   *          each aligned to `prices` with leading NaNs
   */
  static macd(
    prices: number[],
    fastPeriod: number = 12,
    slowPeriod: number = 26,
    signalPeriod: number = 9
  ): { macdLine: number[], signalLine: number[], histogram: number[] } {
    // Calculate EMAs
    const fastEMA = this.ema(prices, fastPeriod);
    const slowEMA = this.ema(prices, slowPeriod);
    // MACD line = fast EMA - slow EMA (NaN while either EMA is warming up).
    const macdLine: number[] = [];
    for (let i = 0; i < prices.length; i++) {
      macdLine.push(isNaN(fastEMA[i]) || isNaN(slowEMA[i])
        ? NaN
        : fastEMA[i] - slowEMA[i]);
    }
    // Signal line = EMA of the MACD line. The NaNs are all at the head of
    // macdLine, so filtering them out and re-padding keeps alignment.
    const signalLine = this.ema(macdLine.filter(val => !isNaN(val)), signalPeriod);
    const paddedSignalLine = Array(prices.length - signalLine.length).fill(NaN).concat(signalLine);
    // Histogram = MACD line - signal line.
    const histogram: number[] = [];
    for (let i = 0; i < prices.length; i++) {
      histogram.push(isNaN(macdLine[i]) || isNaN(paddedSignalLine[i])
        ? NaN
        : macdLine[i] - paddedSignalLine[i]);
    }
    return {
      macdLine,
      signalLine: paddedSignalLine,
      histogram
    };
  }

  /**
   * Calculate Bollinger Bands
   * @param prices Array of price values
   * @param period SMA period (default: 20)
   * @param stdDevMultiplier Standard deviation multiplier (default: 2)
   * @returns Object containing upper band, middle band, and lower band
   */
  static bollingerBands(
    prices: number[],
    period: number = 20,
    stdDevMultiplier: number = 2
  ): { upper: number[], middle: number[], lower: number[] } {
    // Middle band is the SMA; bands are NaN wherever it is.
    const middle = this.sma(prices, period);
    const upper: number[] = [];
    const lower: number[] = [];
    for (let i = 0; i < prices.length; i++) {
      if (isNaN(middle[i])) {
        upper.push(NaN);
        lower.push(NaN);
        continue;
      }
      // Population standard deviation over the period window ending at i.
      let stdDev = 0;
      let count = 0;
      const startIdx = Math.max(0, i - period + 1);
      for (let j = startIdx; j <= i; j++) {
        stdDev += Math.pow(prices[j] - middle[i], 2);
        count++;
      }
      stdDev = Math.sqrt(stdDev / count);
      // Calculate bands
      upper.push(middle[i] + (stdDevMultiplier * stdDev));
      lower.push(middle[i] - (stdDevMultiplier * stdDev));
    }
    return { upper, middle, lower };
  }

  /**
   * Calculate Average True Range (ATR) with Wilder's smoothing.
   * @param bars Array of BarData objects
   * @param period Number of periods for calculation
   * @returns Array of ATR values (first `period` entries are NaN)
   */
  static atr(bars: BarData[], period: number): number[] {
    if (period <= 0 || bars.length < 2) {
      return Array(bars.length).fill(NaN);
    }
    // True Range per bar; the first TR has no previous close, so it is
    // simply high - low.
    const trueRanges: number[] = [];
    trueRanges.push(bars[0].high - bars[0].low);
    for (let i = 1; i < bars.length; i++) {
      const currentHigh = bars[i].high;
      const currentLow = bars[i].low;
      const previousClose = bars[i - 1].close;
      const tr1 = currentHigh - currentLow;
      const tr2 = Math.abs(currentHigh - previousClose);
      const tr3 = Math.abs(currentLow - previousClose);
      trueRanges.push(Math.max(tr1, tr2, tr3));
    }
    const result: number[] = [];
    // Not enough data
    if (trueRanges.length < period) {
      return Array(bars.length).fill(NaN);
    }
    // First ATR is the simple average of the first `period` true ranges.
    let atr = 0;
    for (let i = 0; i < period; i++) {
      atr += trueRanges[i];
    }
    atr /= period;
    result.push(atr);
    // Subsequent ATRs use Wilder's smoothing.
    for (let i = period; i < trueRanges.length; i++) {
      atr = ((atr * (period - 1)) + trueRanges[i]) / period;
      result.push(atr);
    }
    // Fill beginning with NaN
    const nanValues = Array(period).fill(NaN);
    return [...nanValues, ...result];
  }

  /**
   * Calculate Stochastic Oscillator
   * @param bars Array of BarData objects
   * @param period %K period (default: 14)
   * @param smoothK %K smoothing (default: 3)
   * @param smoothD %D period (default: 3)
   * @returns Object containing %K and %D values aligned to `bars`
   */
  static stochastic(
    bars: BarData[],
    period: number = 14,
    smoothK: number = 3,
    smoothD: number = 3
  ): { k: number[], d: number[] } {
    if (period <= 0 || bars.length < period) {
      return { k: Array(bars.length).fill(NaN), d: Array(bars.length).fill(NaN) };
    }
    const rawK: number[] = [];
    // Raw %K: position of the close within the period's high-low range.
    for (let i = period - 1; i < bars.length; i++) {
      let highest = -Infinity;
      let lowest = Infinity;
      for (let j = i - (period - 1); j <= i; j++) {
        highest = Math.max(highest, bars[j].high);
        lowest = Math.min(lowest, bars[j].low);
      }
      const currentClose = bars[i].close;
      // NOTE(review): a perfectly flat window (highest === lowest) yields
      // NaN here via 0/0 — confirm whether upstream data rules that out.
      const rawKValue = 100 * ((currentClose - lowest) / (highest - lowest));
      rawK.push(rawKValue);
    }
    // Fill beginning with NaN
    const nanValues = Array(period - 1).fill(NaN);
    const fullRawK = [...nanValues, ...rawK];
    // %K = SMA-smoothed raw %K; NaNs are all at the head, so filter + re-pad
    // preserves alignment.
    const filteredK = fullRawK.filter(val => !isNaN(val));
    let k = this.sma(filteredK, smoothK);
    k = [...Array(fullRawK.length - k.length).fill(NaN), ...k];
    // %D = SMA of smoothed %K.
    const filteredSmoothedK = k.filter(val => !isNaN(val));
    let d = this.sma(filteredSmoothedK, smoothD);
    d = [...Array(k.length - d.length).fill(NaN), ...d];
    return { k, d };
  }

  /**
   * Extract specific price from bars (e.g., close, open, high, low)
   * @param bars Array of BarData objects
   * @param field Price field to extract
   * @returns Array of extracted price values
   */
  static extractPrice(bars: BarData[], field: 'open' | 'high' | 'low' | 'close' = 'close'): number[] {
    return bars.map(bar => bar[field]);
  }
}

View file

@ -1,604 +0,0 @@
import { EventEmitter } from 'events';
import { BaseStrategy } from '../Strategy';
import { BarData, Order, Position } from '../Strategy';
/**
 * Configuration for a single backtest run.
 */
export interface BacktestConfig {
  startDate: Date;
  endDate: Date;
  symbols: string[];
  initialCapital: number;
  commission: number; // Per trade commission (percentage)
  slippage: number; // Slippage model (percentage)
  dataResolution: '1m' | '5m' | '15m' | '30m' | '1h' | '4h' | '1d';
  // 'event' replays bars one by one; 'vector' is the batch/fast path.
  mode: 'event' | 'vector';
}
/**
 * Final results of a completed backtest.
 */
export interface BacktestResult {
  strategyId: string;
  startDate: Date;
  endDate: Date;
  duration: number; // In milliseconds
  initialCapital: number;
  finalCapital: number;
  totalReturn: number;
  annualizedReturn: number;
  sharpeRatio: number;
  maxDrawdown: number;
  maxDrawdownDuration: number; // In days
  winRate: number;
  totalTrades: number;
  winningTrades: number;
  losingTrades: number;
  averageWinningTrade: number;
  averageLosingTrade: number;
  profitFactor: number;
  dailyReturns: Array<{ date: Date; return: number }>;
  // One entry per round-trip trade.
  trades: Array<{
    symbol: string;
    entryTime: Date;
    entryPrice: number;
    exitTime: Date;
    exitPrice: number;
    quantity: number;
    pnl: number;
    pnlPercent: number;
  }>;
}
/**
 * Periodic progress snapshot emitted while a backtest is running.
 */
export interface BacktestProgress {
  progress: number; // 0-100
  currentDate: Date;
  processingSpeed: number; // Bars per second
  estimatedTimeRemaining: number; // milliseconds
  currentCapital: number;
  currentReturn: number;
  currentDrawdown: number;
}
/**
 * Source of historical bars for the engine.
 */
export interface DataFeed {
  getHistoricalData(symbol: string, resolution: string, start: Date, end: Date): Promise<BarData[]>;
  hasDataFor(symbol: string, resolution: string, start: Date, end: Date): Promise<boolean>;
}
/**
 * Backtesting engine that drives a BaseStrategy over historical bars
 * supplied by a DataFeed, in either event-driven or vectorized mode.
 * Emits 'started', 'loading', 'completed' and 'error' events during a run.
 */
export class BacktestEngine extends EventEmitter {
  private config: BacktestConfig;
  private strategy: BaseStrategy;
  private dataFeed: DataFeed;
  private isRunning: boolean = false;
  private barBuffer: Map<string, BarData[]> = new Map();
  // Orders awaiting simulated execution vs. orders already filled.
  private pendingOrders: Order[] = [];
  private filledOrders: Order[] = [];
  // Simulated clock; advances with each processed bar.
  private currentTime: Date;
  private startTime: number = 0; // For performance tracking
  private processedBars: number = 0;
  private marketData: Map<string, BarData[]> = new Map();
  // Results tracking
  private initialCapital: number;
  private currentCapital: number;
  private positions = new Map<string, Position>();
  private trades: BacktestResult['trades'] = [];
  private dailyReturns: BacktestResult['dailyReturns'] = [];
  private previousPortfolioValue: number;
  // Highest portfolio value seen so far; basis for drawdown tracking.
  private highWaterMark: number;
  private maxDrawdown: number = 0;
  private drawdownStartTime: Date | null = null;
  private maxDrawdownDuration: number = 0;
  // Trade-outcome counters, aggregated as trades close.
  private winningTrades: number = 0;
  private losingTrades: number = 0;
  private breakEvenTrades: number = 0;
  private totalProfits: number = 0;
  private totalLosses: number = 0;
/**
 * @param strategy Strategy to drive through the BaseStrategy lifecycle.
 * @param config   Run configuration (dates, symbols, capital, costs, mode).
 * @param dataFeed Source of historical bars.
 */
constructor(strategy: BaseStrategy, config: BacktestConfig, dataFeed: DataFeed) {
  super();
  this.strategy = strategy;
  this.config = config;
  this.dataFeed = dataFeed;
  this.currentTime = new Date(config.startDate);
  // All capital/performance trackers start at the configured capital.
  this.initialCapital = config.initialCapital;
  this.currentCapital = config.initialCapital;
  this.previousPortfolioValue = config.initialCapital;
  this.highWaterMark = config.initialCapital;
}
/**
 * Execute the backtest once. Emits 'started' before processing, then
 * either 'completed' with the result or 'error' on failure (the error is
 * also rethrown). Throws immediately if a run is already in progress.
 */
async run(): Promise<BacktestResult> {
  if (this.isRunning) {
    throw new Error('Backtest is already running');
  }
  this.isRunning = true;
  this.startTime = Date.now();
  this.emit('started', { strategyId: this.strategy.id, config: this.config });
  try {
    // Load data based on configured mode
    if (this.config.mode === 'event') {
      await this.runEventBased();
    } else {
      await this.runVectorized();
    }
    const result = this.generateResults();
    this.emit('completed', { strategyId: this.strategy.id, result });
    this.isRunning = false;
    return result;
  } catch (error) {
    // Reset the running flag so a failed run does not block retries.
    this.isRunning = false;
    this.emit('error', { strategyId: this.strategy.id, error });
    throw error;
  }
}
/**
 * Event-driven run: replays every bar across all symbols in chronological
 * order. Per bar it fills pending orders, marks positions to market,
 * records a daily return at day boundaries, and forwards the bar to the
 * strategy, queueing any orders it emits. Remaining orders and positions
 * are flushed/closed at the end, then the strategy is stopped.
 */
private async runEventBased(): Promise<void> {
  // Load market data for all symbols
  await this.loadMarketData();
  // Initialize the strategy
  await this.strategy.start();
  // Create a merged timeline of all bars across all symbols, sorted by timestamp
  const timeline = this.createMergedTimeline();
  // Process each event in chronological order
  let lastProgressUpdate = Date.now();
  let prevDate = new Date(0);
  for (let i = 0; i < timeline.length; i++) {
    const bar = timeline[i];
    this.currentTime = bar.timestamp;
    // Process any pending orders (must happen before the strategy sees the
    // bar, so fills use this bar's prices before new orders are queued).
    await this.processOrders(bar);
    // Update positions with current prices
    this.updatePositions(bar);
    // If we've crossed to a new day, calculate daily return
    if (this.currentTime.toDateString() !== prevDate.toDateString()) {
      this.calculateDailyReturn();
      prevDate = this.currentTime;
    }
    // Send the new bar to the strategy
    const orders = await this.strategy.onBar(bar);
    // Add any new orders to the pending orders queue
    if (orders && orders.length > 0) {
      this.pendingOrders.push(...orders);
    }
    // Update progress periodically
    if (Date.now() - lastProgressUpdate > 1000) { // Update every second
      this.updateProgress(i / timeline.length);
      lastProgressUpdate = Date.now();
    }
  }
  // Process any remaining orders
  for (const order of this.pendingOrders) {
    await this.processOrder(order);
  }
  // Close any remaining positions at the last known price
  await this.closeAllPositions();
  // Clean up strategy
  await this.strategy.stop();
}
/**
 * Placeholder for the vectorized (fast) backtest mode.
 *
 * A full vectorized engine would:
 *   1. Pre-compute technical indicators over the whole dataset,
 *   2. Generate buy/sell signals for the entire series at once,
 *   3. Derive portfolio values from those signals.
 * Strategy-specific vectorized logic is expected to be implemented
 * separately; this method currently only loads data and prepares the
 * merged timeline.
 *
 * Fixes: removed a second 'completed' emit (run() already emits the
 * authoritative 'completed' event with the real result, so listeners
 * received two inconsistent events per run) and a block of bookkeeping
 * locals (cash, positions, drawdown trackers) that were never read.
 */
private async runVectorized(): Promise<void> {
  // Load market data for all symbols
  await this.loadMarketData();
  // Prepare the chronological timeline a vectorized implementation
  // would operate on.
  this.createMergedTimeline();
}
private async loadMarketData(): Promise<void> {
for (const symbol of this.config.symbols) {
this.emit('loading', { symbol, resolution: this.config.dataResolution });
// Check if data is available
const hasData = await this.dataFeed.hasDataFor(
symbol,
this.config.dataResolution,
this.config.startDate,
this.config.endDate
);
if (!hasData) {
throw new Error(`No data available for ${symbol} at resolution ${this.config.dataResolution}`);
}
// Load data
const data = await this.dataFeed.getHistoricalData(
symbol,
this.config.dataResolution,
this.config.startDate,
this.config.endDate
);
this.marketData.set(symbol, data);
this.emit('loaded', { symbol, count: data.length });
}
}
/**
 * Flattens the per-symbol bar series into one timeline sorted by
 * timestamp ascending, so events can be replayed chronologically.
 *
 * Uses values()+flat() instead of a destructuring loop that left the
 * map key unused.
 */
private createMergedTimeline(): BarData[] {
  const allBars: BarData[] = [...this.marketData.values()].flat();
  // Sort in place is safe: allBars is a fresh local array.
  return allBars.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
}
private async processOrders(currentBar: BarData): Promise<void> {
// Find orders for the current symbol
const ordersToProcess = this.pendingOrders.filter(order => order.symbol === currentBar.symbol);
if (ordersToProcess.length === 0) return;
// Remove these orders from pendingOrders
this.pendingOrders = this.pendingOrders.filter(order => order.symbol !== currentBar.symbol);
// Process each order
for (const order of ordersToProcess) {
await this.processOrder(order);
}
}
/**
 * Attempts to fill a single order against loaded market data, updating
 * cash, positions, trade statistics and the order's status.
 *
 * Fill model:
 *  - MARKET orders fill at the bar close adjusted by configured slippage
 *    (adverse to the order side).
 *  - LIMIT orders fill at the limit price only if the bar's high/low
 *    range touched it; otherwise the method returns without filling.
 *    NOTE(review): an untouched limit order has already been removed
 *    from pendingOrders by processOrders(), so it is silently dropped
 *    rather than re-queued for a later bar — confirm this is intended.
 *  - Any other order type is rejected.
 */
private async processOrder(order: Order): Promise<void> {
  // Get the bar series for the symbol; no data means immediate rejection.
  const latestBars = this.marketData.get(order.symbol);
  if (!latestBars || latestBars.length === 0) {
    order.status = 'REJECTED';
    this.emit('orderRejected', { order, reason: 'No market data available' });
    return;
  }
  // First bar at or after the order timestamp, falling back to the
  // final bar when the order postdates all data.
  const bar = latestBars.find(b =>
    b.timestamp.getTime() >= order.timestamp.getTime()
  ) || latestBars[latestBars.length - 1];
  // Determine the fill price according to the order type.
  let fillPrice: number;
  if (order.type === 'MARKET') {
    // Slippage moves the price against the trader: up for buys, down for sells.
    const slippageFactor = 1 + (order.side === 'BUY' ? this.config.slippage : -this.config.slippage);
    fillPrice = bar.close * slippageFactor;
  } else if (order.type === 'LIMIT' && order.price !== undefined) {
    // For limit orders, check if the price was reached within the bar's range.
    if ((order.side === 'BUY' && bar.low <= order.price) ||
      (order.side === 'SELL' && bar.high >= order.price)) {
      fillPrice = order.price;
    } else {
      // Limit price not reached — no fill (see NOTE above: order is dropped).
      return;
    }
  } else {
    // Other order types (e.g. STOP) are not implemented.
    order.status = 'REJECTED';
    this.emit('orderRejected', { order, reason: 'Order type not supported' });
    return;
  }
  // Commission is charged as a fraction of the notional order value.
  const orderValue = order.quantity * fillPrice;
  const commission = orderValue * this.config.commission;
  if (order.side === 'BUY') {
    // Reject buys that would exceed available cash (no margin modeled).
    const totalCost = orderValue + commission;
    if (totalCost > this.currentCapital) {
      order.status = 'REJECTED';
      this.emit('orderRejected', { order, reason: 'Insufficient funds' });
      return;
    }
    // Update cash
    this.currentCapital -= totalCost;
    // Update or create position
    const existingPosition = this.positions.get(order.symbol);
    if (existingPosition) {
      // Blend the entry price across old and new shares (weighted average).
      const totalShares = existingPosition.quantity + order.quantity;
      // NOTE(review): this inner `totalCost` (combined cost basis) shadows
      // the cash-outlay `totalCost` above — consider renaming for clarity.
      const totalCost = (existingPosition.quantity * existingPosition.avgPrice) + (order.quantity * fillPrice);
      existingPosition.avgPrice = totalCost / totalShares;
      existingPosition.quantity = totalShares;
    } else {
      // Open a new long position (short selling is not modeled here).
      this.positions.set(order.symbol, {
        symbol: order.symbol,
        quantity: order.quantity,
        avgPrice: fillPrice,
        side: 'LONG',
        entryTime: this.currentTime
      });
    }
  } else if (order.side === 'SELL') {
    const position = this.positions.get(order.symbol);
    if (!position || position.quantity < order.quantity) {
      // Cannot sell more shares than currently held.
      order.status = 'REJECTED';
      this.emit('orderRejected', { order, reason: 'Insufficient position' });
      return;
    }
    // Realized P&L on the shares sold (commission is not deducted from pnl,
    // only from cash below).
    const pnl = (fillPrice - position.avgPrice) * order.quantity;
    // Update cash
    this.currentCapital += orderValue - commission;
    // Update position
    position.quantity -= order.quantity;
    if (position.quantity === 0) {
      // Fully closed: record the round-trip trade and update statistics.
      this.positions.delete(order.symbol);
      this.trades.push({
        symbol: order.symbol,
        entryTime: position.entryTime,
        entryPrice: position.avgPrice,
        exitTime: this.currentTime,
        exitPrice: fillPrice,
        quantity: order.quantity,
        pnl: pnl,
        pnlPercent: (pnl / (position.avgPrice * order.quantity)) * 100
      });
      // Update statistics
      if (pnl > 0) {
        this.winningTrades++;
        this.totalProfits += pnl;
      } else if (pnl < 0) {
        this.losingTrades++;
        this.totalLosses -= pnl; // Make positive for easier calculations
      } else {
        this.breakEvenTrades++;
      }
    }
  }
  // Mark the order as filled and notify the strategy and event listeners.
  order.status = 'FILLED';
  order.fillPrice = fillPrice;
  order.fillTime = this.currentTime;
  this.filledOrders.push(order);
  await this.strategy.onOrderFilled(order);
  this.emit('orderFilled', { order });
}
/**
 * Marks the position for the bar's symbol to market and refreshes
 * high-water-mark / drawdown statistics.
 *
 * Fix: previously this method also overwrote previousPortfolioValue on
 * every single bar, which made calculateDailyReturn() measure a
 * bar-over-bar return instead of a day-over-day return. That checkpoint
 * is now maintained exclusively by calculateDailyReturn().
 */
private updatePositions(currentBar: BarData): void {
  // Update the unrealized P&L for the position in this symbol, if any.
  const position = this.positions.get(currentBar.symbol);
  if (position) {
    const currentPrice = currentBar.close;
    position.unrealizedPnL = (currentPrice - position.avgPrice) * position.quantity;
  }
  const portfolioValue = this.calculatePortfolioValue();
  // New equity peak: raise the high-water mark and end any drawdown.
  if (portfolioValue > this.highWaterMark) {
    this.highWaterMark = portfolioValue;
    this.drawdownStartTime = null;
  }
  // First bar below the peak marks the start of a drawdown period.
  if (this.drawdownStartTime === null && portfolioValue < this.highWaterMark) {
    this.drawdownStartTime = this.currentTime;
  }
  // Track the deepest drawdown and its duration.
  if (this.highWaterMark > 0) {
    const currentDrawdown = (this.highWaterMark - portfolioValue) / this.highWaterMark;
    if (currentDrawdown > this.maxDrawdown) {
      this.maxDrawdown = currentDrawdown;
      if (this.drawdownStartTime !== null) {
        // Duration in days of the deepest drawdown seen so far.
        const drawdownDuration = (this.currentTime.getTime() - this.drawdownStartTime.getTime()) / (1000 * 60 * 60 * 24);
        if (drawdownDuration > this.maxDrawdownDuration) {
          this.maxDrawdownDuration = drawdownDuration;
        }
      }
    }
  }
}
/**
 * Returns current cash plus the mark-to-market value of all open
 * positions, priced as of the simulation clock (this.currentTime).
 *
 * Fix: the previous implementation always priced positions with the
 * FINAL bar of the loaded dataset, i.e. with future data (lookahead
 * bias), so intermediate equity, drawdown and daily-return figures were
 * computed against end-of-backtest prices. We now binary-search for the
 * most recent bar at or before currentTime.
 * NOTE(review): assumes each symbol's bars are in ascending timestamp
 * order as returned by the data feed — confirm feed ordering.
 */
private calculatePortfolioValue(): number {
  let totalValue = this.currentCapital;
  const now = this.currentTime.getTime();
  for (const [symbol, position] of this.positions.entries()) {
    const bars = this.marketData.get(symbol);
    // Fallback when no price data exists at all: use the entry price
    // (not ideal, but better than nothing).
    let price = position.avgPrice;
    if (bars && bars.length > 0) {
      // Binary search for the last bar with timestamp <= currentTime.
      let lo = 0;
      let hi = bars.length - 1;
      let idx = -1;
      while (lo <= hi) {
        const mid = (lo + hi) >> 1;
        if (bars[mid].timestamp.getTime() <= now) {
          idx = mid;
          lo = mid + 1;
        } else {
          hi = mid - 1;
        }
      }
      // If every bar postdates the clock (shouldn't happen for an open
      // position), fall back to the earliest bar rather than a future one.
      price = idx >= 0 ? bars[idx].close : bars[0].close;
    }
    totalValue += position.quantity * price;
  }
  return totalValue;
}
/**
 * Samples the portfolio return since the previous checkpoint and
 * advances the checkpoint to the current portfolio value.
 */
private calculateDailyReturn(): void {
  const value = this.calculatePortfolioValue();
  const change = (value - this.previousPortfolioValue) / this.previousPortfolioValue;
  this.dailyReturns.push({
    date: new Date(this.currentTime),
    return: change
  });
  this.previousPortfolioValue = value;
}
/**
 * Force-closes every open position at its latest known close price at
 * the end of the backtest, recording the resulting trades and updating
 * win/loss statistics.
 *
 * Fix: now charges the configured commission on these forced exits —
 * previously end-of-backtest sells were commission-free, inconsistent
 * with the cost model processOrder() applies to ordinary sells.
 */
private async closeAllPositions(): Promise<void> {
  for (const [symbol, position] of this.positions.entries()) {
    // Find the latest price; skip symbols with no data at all.
    const bars = this.marketData.get(symbol);
    if (!bars || bars.length === 0) continue;
    const lastBar = bars[bars.length - 1];
    const closePrice = lastBar.close;
    // Realized P&L on the closing trade (commission affects cash only,
    // matching processOrder's treatment).
    const pnl = (closePrice - position.avgPrice) * position.quantity;
    const proceeds = position.quantity * closePrice;
    const commission = proceeds * this.config.commission;
    this.currentCapital += proceeds - commission;
    // Record the trade
    this.trades.push({
      symbol,
      entryTime: position.entryTime,
      entryPrice: position.avgPrice,
      exitTime: this.currentTime,
      exitPrice: closePrice,
      quantity: position.quantity,
      pnl,
      pnlPercent: (pnl / (position.avgPrice * position.quantity)) * 100
    });
    // Update statistics
    if (pnl > 0) {
      this.winningTrades++;
      this.totalProfits += pnl;
    } else if (pnl < 0) {
      this.losingTrades++;
      this.totalLosses -= pnl; // Make positive for easier calculations
    } else {
      this.breakEvenTrades++;
    }
  }
  // Clear positions
  this.positions.clear();
}
/**
 * Emits a 'progress' event with completion percentage, throughput,
 * time-remaining estimate and current equity/drawdown figures.
 *
 * Fix: guards the divisions by `progress` and elapsed time — at
 * progress === 0 (or a zero-millisecond elapsed window) the previous
 * code produced Infinity/NaN estimates.
 */
private updateProgress(progress: number): void {
  const currentPortfolioValue = this.calculatePortfolioValue();
  const currentDrawdown = this.highWaterMark > 0
    ? (this.highWaterMark - currentPortfolioValue) / this.highWaterMark
    : 0;
  const elapsedMs = Date.now() - this.startTime;
  const elapsedSec = elapsedMs / 1000;
  // Linear extrapolation of remaining time; undefined until progress > 0.
  const remainingMs = progress > 0 ? (elapsedMs / progress) - elapsedMs : 0;
  this.emit('progress', {
    progress: progress * 100,
    currentDate: this.currentTime,
    processingSpeed: elapsedSec > 0 ? this.processedBars / elapsedSec : 0,
    estimatedTimeRemaining: remainingMs,
    currentCapital: this.currentCapital,
    currentReturn: (currentPortfolioValue - this.initialCapital) / this.initialCapital,
    currentDrawdown
  } as BacktestProgress);
}
/**
 * Builds the final BacktestResult from the statistics accumulated during
 * the run (trades, daily returns, drawdown tracking).
 */
private generateResults(): BacktestResult {
  const currentPortfolioValue = this.calculatePortfolioValue();
  const totalReturn = (currentPortfolioValue - this.initialCapital) / this.initialCapital;
  // Annualize by compounding the total return over the window length.
  // NOTE(review): a window shorter than one day gives days < 1 and can
  // produce extreme annualized figures — confirm acceptable.
  const days = (this.config.endDate.getTime() - this.config.startDate.getTime()) / (1000 * 60 * 60 * 24);
  const annualizedReturn = Math.pow(1 + totalReturn, 365 / days) - 1;
  // Sharpe ratio from daily returns: population standard deviation,
  // risk-free rate assumed zero, annualized with 252 trading days.
  let sharpeRatio = 0;
  if (this.dailyReturns.length > 1) {
    const dailyReturnValues = this.dailyReturns.map(dr => dr.return);
    const avgDailyReturn = dailyReturnValues.reduce((sum, ret) => sum + ret, 0) / dailyReturnValues.length;
    const stdDev = Math.sqrt(
      dailyReturnValues.reduce((sum, ret) => sum + Math.pow(ret - avgDailyReturn, 2), 0) / dailyReturnValues.length
    );
    // Annualize
    sharpeRatio = stdDev > 0
      ? (avgDailyReturn * 252) / (stdDev * Math.sqrt(252))
      : 0;
  }
  // Win rate and profit factor over CLOSED trades only.
  const totalTrades = this.winningTrades + this.losingTrades + this.breakEvenTrades;
  const winRate = totalTrades > 0 ? this.winningTrades / totalTrades : 0;
  // Profit factor is Infinity when there were profits but zero losses.
  const profitFactor = this.totalLosses > 0 ? this.totalProfits / this.totalLosses : (this.totalProfits > 0 ? Infinity : 0);
  // Average winning/losing trade sizes (totalLosses is stored positive).
  const avgWinningTrade = this.winningTrades > 0 ? this.totalProfits / this.winningTrades : 0;
  const avgLosingTrade = this.losingTrades > 0 ? this.totalLosses / this.losingTrades : 0;
  return {
    strategyId: this.strategy.id,
    startDate: this.config.startDate,
    endDate: this.config.endDate,
    duration: Date.now() - this.startTime,
    initialCapital: this.initialCapital,
    finalCapital: currentPortfolioValue,
    totalReturn,
    annualizedReturn,
    sharpeRatio,
    maxDrawdown: this.maxDrawdown,
    maxDrawdownDuration: this.maxDrawdownDuration,
    winRate,
    totalTrades,
    winningTrades: this.winningTrades,
    losingTrades: this.losingTrades,
    averageWinningTrade: avgWinningTrade,
    averageLosingTrade: avgLosingTrade,
    profitFactor,
    dailyReturns: this.dailyReturns,
    trades: this.trades
  };
}
}

View file

@ -1,186 +0,0 @@
import { BaseStrategy } from '../Strategy';
import { BacktestConfig, BacktestEngine, BacktestResult } from './BacktestEngine';
import { MarketDataFeed } from './MarketDataFeed';
import { StrategyRegistry, StrategyType } from '../strategies/StrategyRegistry';
/** Parameters describing a single backtest run submitted to BacktestService. */
export interface BacktestRequest {
  /** Which registered strategy implementation to instantiate. */
  strategyType: StrategyType;
  /** Strategy-specific parameters forwarded to the strategy factory. */
  strategyParams: Record<string, any>;
  /** Symbols making up the backtest universe. */
  symbols: string[];
  /** Backtest window start (ISO date string or Date). */
  startDate: Date | string;
  /** Backtest window end (ISO date string or Date). */
  endDate: Date | string;
  /** Starting cash for the simulated account. */
  initialCapital: number;
  /** Bar resolution requested from the historical data feed. */
  dataResolution: '1m' | '5m' | '15m' | '30m' | '1h' | '4h' | '1d';
  /** Commission rate charged per order, as a fraction of order value. */
  commission: number;
  /** Slippage applied to market-order fills, as a fraction of price. */
  slippage: number;
  /** 'event' replays bar-by-bar; 'vector' uses the vectorized fast path. */
  mode: 'event' | 'vector';
}
/**
* Backtesting Service
*
* A service that handles backtesting requests and manages backtesting sessions.
*/
export class BacktestService {
  private readonly strategyRegistry: StrategyRegistry;
  private readonly dataFeed: MarketDataFeed;
  /** Engines for in-flight backtests, keyed by generated strategy id. */
  private readonly activeBacktests: Map<string, BacktestEngine> = new Map();

  constructor(apiBaseUrl: string = 'http://localhost:3001/api') {
    this.strategyRegistry = StrategyRegistry.getInstance();
    this.dataFeed = new MarketDataFeed(apiBaseUrl);
  }

  /**
   * Run a backtest based on a request.
   *
   * @param request - Strategy, symbol, window and cost parameters.
   * @returns The completed backtest result.
   * @throws Error if the date range is malformed or inverted, or if the
   *         underlying engine fails.
   */
  async runBacktest(request: BacktestRequest): Promise<BacktestResult> {
    // Parse dates if they are strings, then fail fast on malformed input.
    // Previously an unparseable date string flowed through as an Invalid
    // Date (NaN timestamps) and produced a silently broken backtest window.
    const startDate = typeof request.startDate === 'string'
      ? new Date(request.startDate)
      : request.startDate;
    const endDate = typeof request.endDate === 'string'
      ? new Date(request.endDate)
      : request.endDate;
    if (Number.isNaN(startDate.getTime()) || Number.isNaN(endDate.getTime())) {
      throw new Error('Invalid startDate or endDate in backtest request');
    }
    if (startDate.getTime() >= endDate.getTime()) {
      throw new Error('startDate must be before endDate');
    }

    // Create a strategy instance dedicated to this run.
    const strategyId = `backtest_${Date.now()}`;
    const strategy = this.strategyRegistry.createStrategy(
      request.strategyType,
      strategyId,
      `Backtest ${request.strategyType}`,
      `Generated backtest for ${request.symbols.join(', ')}`,
      request.symbols,
      request.strategyParams
    );

    // Create backtest configuration
    const config: BacktestConfig = {
      startDate,
      endDate,
      symbols: request.symbols,
      initialCapital: request.initialCapital,
      commission: request.commission,
      slippage: request.slippage,
      dataResolution: request.dataResolution,
      mode: request.mode
    };

    // Create and run the backtest engine
    const engine = new BacktestEngine(strategy, config, this.dataFeed);
    this.activeBacktests.set(strategyId, engine);
    try {
      // Forward engine lifecycle events to the console for visibility.
      const forwardEvents = (eventName: string) => {
        engine.on(eventName, (data) => {
          console.log(`[Backtest ${strategyId}] ${eventName}:`, data);
        });
      };
      for (const eventName of [
        'started', 'loading', 'loaded', 'progress',
        'orderFilled', 'orderRejected', 'completed', 'error'
      ]) {
        forwardEvents(eventName);
      }
      // `return await` so the finally block runs only after completion.
      return await engine.run();
    } finally {
      // Single cleanup path for both success and failure (previously
      // duplicated in the try and catch branches).
      this.activeBacktests.delete(strategyId);
    }
  }

  /**
   * Optimize a strategy by running one backtest per combination of the
   * supplied parameter grid (exhaustive grid search).
   *
   * @param baseRequest - Request template shared by every run.
   * @param parameterGrid - Candidate values per parameter name.
   * @returns Results annotated with their parameter set, sorted by
   *          Sharpe ratio descending; failed combinations are logged
   *          and skipped.
   */
  async optimizeStrategy(
    baseRequest: BacktestRequest,
    parameterGrid: Record<string, any[]>
  ): Promise<Array<BacktestResult & { parameters: Record<string, any> }>> {
    const results: Array<BacktestResult & { parameters: Record<string, any> }> = [];
    const paramKeys = Object.keys(parameterGrid);
    const combinations = this.generateParameterCombinations(parameterGrid, paramKeys);
    // Runs are sequential to avoid overlapping data loads and resource spikes.
    for (const paramSet of combinations) {
      const request = {
        ...baseRequest,
        strategyParams: {
          ...baseRequest.strategyParams,
          ...paramSet
        }
      };
      try {
        const result = await this.runBacktest(request);
        results.push({
          ...result,
          parameters: paramSet
        });
      } catch (error) {
        console.error(`Optimization failed for parameters:`, paramSet, error);
      }
    }
    // Sort by performance metric (Sharpe ratio, best first).
    return results.sort((a, b) => b.sharpeRatio - a.sharpeRatio);
  }

  /**
   * Recursively generates every combination of grid values (cartesian
   * product over the given keys).
   */
  private generateParameterCombinations(
    grid: Record<string, any[]>,
    keys: string[],
    current: Record<string, any> = {},
    index: number = 0,
    result: Record<string, any>[] = []
  ): Record<string, any>[] {
    if (index === keys.length) {
      // Snapshot the accumulator; `current` is mutated by the caller.
      result.push({ ...current });
      return result;
    }
    const key = keys[index];
    for (const value of grid[key]) {
      current[key] = value;
      this.generateParameterCombinations(grid, keys, current, index + 1, result);
    }
    return result;
  }

  /** Get an active backtest engine by ID. */
  getBacktestEngine(id: string): BacktestEngine | undefined {
    return this.activeBacktests.get(id);
  }

  /**
   * Cancel a running backtest.
   *
   * The engine exposes no explicit cancel hook, so this only detaches it
   * from the active map; the run itself continues until completion.
   *
   * @returns true if an engine with that id was tracked, false otherwise.
   */
  cancelBacktest(id: string): boolean {
    const engine = this.activeBacktests.get(id);
    if (!engine) return false;
    this.activeBacktests.delete(id);
    return true;
  }
}

View file

@ -1,166 +0,0 @@
import { BarData } from '../Strategy';
import { DataFeed } from './BacktestEngine';
import axios from 'axios';
/**
 * HTTP-backed DataFeed implementation with an in-memory cache and a
 * synthetic-data fallback for offline/test use.
 */
export class MarketDataFeed implements DataFeed {
  private readonly apiBaseUrl: string;
  // Cache of fetched bar series keyed by symbol/resolution/range.
  // NOTE(review): unbounded — long-lived processes may want eviction.
  private cache: Map<string, BarData[]> = new Map();

  /** @param apiBaseUrl - Base URL of the market-data HTTP API. */
  constructor(apiBaseUrl: string = 'http://localhost:3001/api') {
    this.apiBaseUrl = apiBaseUrl;
  }

  /**
   * Fetches historical bars for a symbol/resolution/date range.
   *
   * NOTE(review): on any request or parse failure this silently falls
   * back to randomly generated synthetic bars (generateFallbackTestData),
   * so a backtest run against an unreachable API produces
   * plausible-looking but meaningless results — confirm this is intended.
   */
  async getHistoricalData(symbol: string, resolution: string, start: Date, end: Date): Promise<BarData[]> {
    const cacheKey = this.getCacheKey(symbol, resolution, start, end);
    // Check cache first
    if (this.cache.has(cacheKey)) {
      return this.cache.get(cacheKey)!;
    }
    try {
      // Format dates for API request
      const startStr = start.toISOString();
      const endStr = end.toISOString();
      const response = await axios.get(`${this.apiBaseUrl}/market-data/history`, {
        params: {
          symbol,
          resolution,
          start: startStr,
          end: endStr
        }
      });
      if (!response.data.success || !response.data.data) {
        throw new Error(`Failed to fetch historical data for ${symbol}`);
      }
      // Transform API response rows into BarData objects.
      const bars: BarData[] = response.data.data.map((bar: any) => ({
        symbol,
        timestamp: new Date(bar.timestamp),
        open: bar.open,
        high: bar.high,
        low: bar.low,
        close: bar.close,
        volume: bar.volume
      }));
      // Cache the result
      this.cache.set(cacheKey, bars);
      return bars;
    } catch (error) {
      console.error(`Error fetching historical data for ${symbol}:`, error);
      // Return fallback test data if API call fails
      return this.generateFallbackTestData(symbol, resolution, start, end);
    }
  }

  /**
   * Asks the API whether data exists for the given range.
   * NOTE(review): deliberately returns true when the availability check
   * itself fails (best-effort behavior for test environments).
   */
  async hasDataFor(symbol: string, resolution: string, start: Date, end: Date): Promise<boolean> {
    try {
      const startStr = start.toISOString();
      const endStr = end.toISOString();
      const response = await axios.get(`${this.apiBaseUrl}/market-data/available`, {
        params: {
          symbol,
          resolution,
          start: startStr,
          end: endStr
        }
      });
      return response.data.success && response.data.data.available;
    } catch (error) {
      console.error(`Error checking data availability for ${symbol}:`, error);
      // Assume data is available for test purposes
      return true;
    }
  }

  /** Empties the in-memory bar cache. */
  clearCache(): void {
    this.cache.clear();
  }

  // Cache key uniquely identifying a symbol/resolution/date-range request.
  private getCacheKey(symbol: string, resolution: string, start: Date, end: Date): string {
    return `${symbol}_${resolution}_${start.getTime()}_${end.getTime()}`;
  }

  /**
   * Generates a synthetic random-walk bar series used as a fallback when
   * the API is unavailable. Weekends are skipped; note that intraday
   * resolutions still produce bars around the clock on weekdays.
   */
  private generateFallbackTestData(symbol: string, resolution: string, start: Date, end: Date): BarData[] {
    console.warn(`Generating fallback test data for ${symbol} from ${start} to ${end}`);
    const bars: BarData[] = [];
    let current = new Date(start);
    let basePrice = this.getBasePrice(symbol);
    // Generate daily bars by default
    const interval = this.getIntervalFromResolution(resolution);
    while (current.getTime() <= end.getTime()) {
      // Only generate bars for trading days (skip weekends)
      if (current.getDay() !== 0 && current.getDay() !== 6) {
        // Generate a random daily price movement (-1% to +1%)
        const dailyChange = (Math.random() * 2 - 1) / 100;
        // Add some randomness to the volatility
        const volatility = 0.005 + Math.random() * 0.01; // 0.5% to 1.5%
        const open = basePrice * (1 + (Math.random() * 0.002 - 0.001));
        const close = open * (1 + dailyChange);
        const high = Math.max(open, close) * (1 + Math.random() * volatility);
        const low = Math.min(open, close) * (1 - Math.random() * volatility);
        const volume = Math.floor(100000 + Math.random() * 900000);
        bars.push({
          symbol,
          timestamp: new Date(current),
          open,
          high,
          low,
          close,
          volume
        });
        // Update base price for next bar
        basePrice = close;
      }
      // Move to next interval
      current = new Date(current.getTime() + interval);
    }
    return bars;
  }

  // Return a realistic base price for common symbols (slightly randomized).
  private getBasePrice(symbol: string): number {
    switch (symbol.toUpperCase()) {
      case 'AAPL': return 170 + Math.random() * 30;
      case 'MSFT': return 370 + Math.random() * 50;
      case 'AMZN': return 140 + Math.random() * 20;
      case 'GOOGL': return 130 + Math.random() * 20;
      case 'META': return 300 + Math.random() * 50;
      case 'TSLA': return 180 + Math.random() * 70;
      case 'NVDA': return 700 + Math.random() * 200;
      case 'SPY': return 450 + Math.random() * 30;
      case 'QQQ': return 370 + Math.random() * 40;
      default: return 100 + Math.random() * 50;
    }
  }

  // Return milliseconds for each resolution
  private getIntervalFromResolution(resolution: string): number {
    switch (resolution) {
      case '1m': return 60 * 1000;
      case '5m': return 5 * 60 * 1000;
      case '15m': return 15 * 60 * 1000;
      case '30m': return 30 * 60 * 1000;
      case '1h': return 60 * 60 * 1000;
      case '4h': return 4 * 60 * 60 * 1000;
      case '1d': return 24 * 60 * 60 * 1000;
      default: return 24 * 60 * 60 * 1000; // Default to daily
    }
  }
}

Some files were not shown because too many files have changed in this diff Show more