refactored monorepo for more projects

This commit is contained in:
Boki 2025-06-22 23:48:01 -04:00
parent 4632c174dc
commit 9492f1b15e
180 changed files with 1438 additions and 424 deletions

124
apps/stock/README.md Normal file
View file

@ -0,0 +1,124 @@
# Stock Trading Bot Application
A comprehensive stock trading bot application with multiple microservices for data ingestion, processing, and API access.
## Architecture
The stock bot consists of the following services:
- **Config**: Centralized configuration management
- **Data Ingestion**: Handles real-time and historical data collection
- **Data Pipeline**: Processes and transforms market data
- **Web API**: RESTful API for accessing stock data
- **Web App**: Frontend user interface
## Quick Start
### Prerequisites
- Node.js >= 18.0.0
- Bun >= 1.1.0
- Turbo
- PostgreSQL, MongoDB, QuestDB, and Redis/Dragonfly running locally
### Installation
```bash
# Install all dependencies
bun install
# Build the configuration package first
bun run build:config
```
### Development
```bash
# Run all services in development mode (using Turbo)
bun run dev
# Run only backend services
bun run dev:backend
# Run only frontend
bun run dev:frontend
# Run specific service
bun run dev:ingestion
bun run dev:pipeline
bun run dev:api
bun run dev:web
```
### Production
```bash
# Build all services (using Turbo)
bun run build
# Start with PM2
bun run pm2:start
# Check status
bun run pm2:status
# View logs
bun run pm2:logs
```
### Configuration
Configuration is managed centrally in the `config` package.
- Default config: `config/config/default.json`
- Environment-specific: `config/config/[environment].json`
- Environment variables: Can override any config value
### Health Checks
```bash
# Check all services health
bun run health:check
```
### Database Management
```bash
# Run migrations
bun run db:migrate
# Seed database
bun run db:seed
```
## Available Scripts
| Script | Description |
|--------|-------------|
| `dev` | Run all services in development mode |
| `build` | Build all services |
| `start` | Start all backend services |
| `test` | Run tests for all services |
| `lint` | Lint all services |
| `clean` | Clean build artifacts and dependencies |
| `docker:build` | Build Docker images |
| `pm2:start` | Start services with PM2 |
| `health:check` | Check health of all services |
## Service Ports
- Data Ingestion: 2001
- Data Pipeline: 2002
- Web API: 2003
- Web App: 3000 (or next available)
## Environment Variables
Key environment variables:
- `NODE_ENV`: development, test, or production
- `PORT`: Override default service port
- Database connection strings
- API keys for data providers
See `config/config/default.json` for full configuration options.

View file

@ -0,0 +1,223 @@
{
"name": "stock-bot",
"version": "1.0.0",
"environment": "development",
"service": {
"name": "stock-bot",
"port": 3000,
"host": "0.0.0.0",
"healthCheckPath": "/health",
"metricsPath": "/metrics",
"shutdownTimeout": 30000,
"cors": {
"enabled": true,
"origin": "*",
"credentials": true
}
},
"database": {
"postgres": {
"enabled": true,
"host": "localhost",
"port": 5432,
"database": "trading_bot",
"user": "trading_user",
"password": "trading_pass_dev",
"ssl": false,
"poolSize": 20,
"connectionTimeout": 30000,
"idleTimeout": 10000
},
"questdb": {
"host": "localhost",
"ilpPort": 9009,
"httpPort": 9000,
"pgPort": 8812,
"database": "questdb",
"user": "admin",
"password": "quest",
"bufferSize": 65536,
"flushInterval": 1000
},
"mongodb": {
"uri": "mongodb://trading_admin:trading_mongo_dev@localhost:27017/stock?authSource=admin",
"database": "stock",
"poolSize": 20
},
"dragonfly": {
"host": "localhost",
"port": 6379,
"db": 0,
"keyPrefix": "stock-bot:",
"maxRetries": 3,
"retryDelay": 100
}
},
"log": {
"level": "info",
"format": "json",
"hideObject": false,
"loki": {
"enabled": false,
"host": "localhost",
"port": 3100,
"labels": {}
}
},
"redis": {
"enabled": true,
"host": "localhost",
"port": 6379,
"db": 0
},
"queue": {
"enabled": true,
"redis": {
"host": "localhost",
"port": 6379,
"db": 1
},
"workers": 5,
"concurrency": 2,
"enableScheduledJobs": true,
"delayWorkerStart": false,
"defaultJobOptions": {
"attempts": 3,
"backoff": {
"type": "exponential",
"delay": 1000
},
"removeOnComplete": 100,
"removeOnFail": 50,
"timeout": 300000
}
},
"http": {
"timeout": 30000,
"retries": 3,
"retryDelay": 1000,
"userAgent": "StockBot/1.0",
"proxy": {
"enabled": false
}
},
"webshare": {
"apiKey": "",
"apiUrl": "https://proxy.webshare.io/api/v2/",
"enabled": true
},
"browser": {
"headless": true,
"timeout": 30000
},
"proxy": {
"cachePrefix": "proxy:",
"ttl": 3600
},
"providers": {
"yahoo": {
"name": "yahoo",
"enabled": true,
"priority": 1,
"rateLimit": {
"maxRequests": 5,
"windowMs": 60000
},
"timeout": 30000,
"baseUrl": "https://query1.finance.yahoo.com"
},
"qm": {
"name": "qm",
"enabled": false,
"priority": 2,
"username": "",
"password": "",
"baseUrl": "https://app.quotemedia.com/quotetools",
"webmasterId": ""
},
"ib": {
"name": "ib",
"enabled": false,
"priority": 3,
"gateway": {
"host": "localhost",
"port": 5000,
"clientId": 1
},
"marketDataType": "delayed"
},
"eod": {
"name": "eod",
"enabled": false,
"priority": 4,
"apiKey": "",
"baseUrl": "https://eodhistoricaldata.com/api",
"tier": "free"
}
},
"features": {
"realtime": true,
"backtesting": true,
"paperTrading": true,
"autoTrading": false,
"historicalData": true,
"realtimeData": true,
"fundamentalData": true,
"newsAnalysis": false,
"notifications": false,
"emailAlerts": false,
"smsAlerts": false,
"webhookAlerts": false,
"technicalAnalysis": true,
"sentimentAnalysis": false,
"patternRecognition": false,
"riskManagement": true,
"positionSizing": true,
"stopLoss": true,
"takeProfit": true
},
"services": {
"dataIngestion": {
"port": 2001,
"workers": 4,
"queues": {
"ceo": { "concurrency": 2 },
"webshare": { "concurrency": 1 },
"qm": { "concurrency": 2 },
"ib": { "concurrency": 1 },
"proxy": { "concurrency": 1 }
},
"rateLimit": {
"enabled": true,
"requestsPerSecond": 10
}
},
"dataPipeline": {
"port": 2002,
"workers": 2,
"batchSize": 1000,
"processingInterval": 60000,
"queues": {
"exchanges": { "concurrency": 1 },
"symbols": { "concurrency": 2 }
},
"syncOptions": {
"maxRetries": 3,
"retryDelay": 5000,
"timeout": 300000
}
},
"webApi": {
"port": 2003,
"rateLimitPerMinute": 60,
"cache": {
"ttl": 300,
"checkPeriod": 60
},
"cors": {
"origins": ["http://localhost:3000", "http://localhost:4200"],
"credentials": true
}
}
}
}

View file

@ -0,0 +1,11 @@
{
"environment": "development",
"log": {
"level": "debug",
"format": "pretty"
},
"features": {
"autoTrading": false,
"paperTrading": true
}
}

View file

@ -0,0 +1,42 @@
{
"environment": "production",
"log": {
"level": "warn",
"format": "json",
"loki": {
"enabled": true,
"host": "loki.production.example.com",
"port": 3100
}
},
"database": {
"postgres": {
"host": "postgres.production.example.com",
"ssl": true,
"poolSize": 50
},
"questdb": {
"host": "questdb.production.example.com"
},
"mongodb": {
"uri": "mongodb+srv://prod_user:prod_pass@cluster.mongodb.net/stock?retryWrites=true&w=majority",
"poolSize": 50
},
"dragonfly": {
"host": "redis.production.example.com",
"password": "production_redis_password"
}
},
"queue": {
"redis": {
"host": "redis.production.example.com",
"password": "production_redis_password"
}
},
"features": {
"autoTrading": true,
"notifications": true,
"emailAlerts": true,
"webhookAlerts": true
}
}

View file

@ -0,0 +1,22 @@
{
"name": "@stock-bot/stock-config",
"version": "1.0.0",
"description": "Stock trading bot configuration",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"build": "tsc",
"clean": "rm -rf dist",
"dev": "tsc --watch",
"test": "jest",
"lint": "eslint src --ext .ts"
},
"dependencies": {
"@stock-bot/config": "*",
"zod": "^3.22.4"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.3"
}
}

View file

@ -0,0 +1,83 @@
import { ConfigManager, createAppConfig } from '@stock-bot/config';
import { stockAppSchema, type StockAppConfig } from './schemas';
import * as path from 'path';
let configInstance: ConfigManager<StockAppConfig> | null = null;
/**
 * Initialize the stock application configuration.
 *
 * @param serviceName - Optional service name; when given and present in
 *   `config.services`, the returned config's `service.port` is overridden
 *   with that service's port and `service.name` becomes the kebab-case name.
 * @returns the validated application configuration.
 * @throws rethrows any initialization/validation error after logging it.
 */
export function initializeStockConfig(serviceName?: 'dataIngestion' | 'dataPipeline' | 'webApi'): StockAppConfig {
  try {
    if (!configInstance) {
      configInstance = createAppConfig(stockAppSchema, {
        configPath: path.join(__dirname, '../config'),
      });
    }
    const config = configInstance.initialize(stockAppSchema);
    // If a service name is provided, override the service port
    if (serviceName && config.services?.[serviceName]) {
      return {
        ...config,
        service: {
          ...config.service,
          port: config.services[serviceName].port,
          name: serviceName.replace(/([A-Z])/g, '-$1').toLowerCase() // Convert camelCase to kebab-case
        }
      };
    }
    return config;
  } catch (error: unknown) {
    // `unknown` instead of `any`: narrow before touching any property.
    const message = error instanceof Error ? error.message : String(error);
    console.error('Failed to initialize stock configuration:', message);
    // Validation failures (e.g. from zod) carry an `errors` array with details.
    if (error && typeof error === 'object' && 'errors' in error) {
      console.error('Validation errors:', JSON.stringify((error as { errors: unknown }).errors, null, 2));
    }
    throw error;
  }
}
/**
 * Return the current stock configuration, lazily initializing on first use.
 */
export function getStockConfig(): StockAppConfig {
  if (configInstance) {
    return configInstance.get();
  }
  // First access: build and validate the configuration before returning it.
  return initializeStockConfig();
}
/**
 * Look up the configuration section for one backend service, if present.
 */
export function getServiceConfig(service: 'dataIngestion' | 'dataPipeline' | 'webApi') {
  return getStockConfig().services?.[service];
}
/**
 * Look up the configuration for one market-data provider.
 */
export function getProviderConfig(provider: 'eod' | 'ib' | 'qm' | 'yahoo') {
  return getStockConfig().providers[provider];
}
/**
 * Report whether a named feature flag is turned on in the current config.
 */
export function isFeatureEnabled(feature: keyof StockAppConfig['features']): boolean {
  return getStockConfig().features[feature];
}
/**
 * Reset configuration (useful for testing).
 * Drops the cached singleton so the next accessor call re-initializes.
 */
export function resetStockConfig(): void {
  configInstance = null;
}

View file

@ -0,0 +1,15 @@
// Export schemas (zod schemas plus their inferred types)
export * from './schemas';
// Export config instance functions (singleton accessors around ConfigManager)
export {
  initializeStockConfig,
  getStockConfig,
  getServiceConfig,
  getProviderConfig,
  isFeatureEnabled,
  resetStockConfig,
} from './config-instance';
// Re-export type for convenience
export type { StockAppConfig } from './schemas/stock-app.schema';

View file

@ -0,0 +1,35 @@
import { z } from 'zod';
/**
 * Feature flags for the stock trading application.
 * Every flag is a boolean with an explicit default so a key missing from the
 * JSON config files resolves deterministically.
 */
const flag = (enabledByDefault: boolean) => z.boolean().default(enabledByDefault);

export const featuresSchema = z.object({
  // Trading features
  realtime: flag(true),
  backtesting: flag(true),
  paperTrading: flag(true),
  autoTrading: flag(false),
  // Data features
  historicalData: flag(true),
  realtimeData: flag(true),
  fundamentalData: flag(true),
  newsAnalysis: flag(false),
  // Notification features
  notifications: flag(false),
  emailAlerts: flag(false),
  smsAlerts: flag(false),
  webhookAlerts: flag(false),
  // Analysis features
  technicalAnalysis: flag(true),
  sentimentAnalysis: flag(false),
  patternRecognition: flag(false),
  // Risk management
  riskManagement: flag(true),
  positionSizing: flag(true),
  stopLoss: flag(true),
  takeProfit: flag(true),
});

View file

@ -0,0 +1,3 @@
// Barrel file for the configuration schemas package.
export * from './stock-app.schema';
export * from './providers.schema';
export * from './features.schema';

View file

@ -0,0 +1,67 @@
import { z } from 'zod';
// Base provider configuration fields shared by every market-data provider.
export const baseProviderConfigSchema = z.object({
  name: z.string(),
  enabled: z.boolean().default(true),
  // In default.json providers are numbered yahoo=1 ... eod=4; presumably
  // lower = preferred — TODO confirm against the provider selection code.
  priority: z.number().default(0),
  // Optional client-side rate limit: at most maxRequests per windowMs.
  rateLimit: z
    .object({
      maxRequests: z.number().default(100),
      windowMs: z.number().default(60000),
    })
    .optional(),
  timeout: z.number().default(30000),
  retries: z.number().default(3),
});
// EOD Historical Data provider (requires an API key).
export const eodProviderConfigSchema = baseProviderConfigSchema.extend({
  apiKey: z.string(),
  baseUrl: z.string().default('https://eodhistoricaldata.com/api'),
  // Subscription tier — presumably gates which endpoints are usable; confirm.
  tier: z.enum(['free', 'fundamentals', 'all-in-one']).default('free'),
});
// Interactive Brokers provider — connects through a local gateway process.
export const ibProviderConfigSchema = baseProviderConfigSchema.extend({
  gateway: z.object({
    host: z.string().default('localhost'),
    port: z.number().default(5000),
    clientId: z.number().default(1),
  }),
  account: z.string().optional(),
  marketDataType: z.enum(['live', 'delayed', 'frozen']).default('delayed'),
});
// QuoteMedia provider — authenticated with account credentials + webmaster id.
export const qmProviderConfigSchema = baseProviderConfigSchema.extend({
  username: z.string(),
  password: z.string(),
  baseUrl: z.string().default('https://app.quotemedia.com/quotetools'),
  webmasterId: z.string(),
});
// Yahoo Finance provider — no credentials; optional cookie jar and crumb token.
export const yahooProviderConfigSchema = baseProviderConfigSchema.extend({
  baseUrl: z.string().default('https://query1.finance.yahoo.com'),
  cookieJar: z.boolean().default(true),
  crumb: z.string().optional(),
});
// Combined provider configuration: every section is optional so a deployment
// can configure only the providers it actually uses.
export const providersSchema = z.object({
  eod: eodProviderConfigSchema.optional(),
  ib: ibProviderConfigSchema.optional(),
  qm: qmProviderConfigSchema.optional(),
  yahoo: yahooProviderConfigSchema.optional(),
});
// Dynamic provider configuration type
export type ProviderName = 'eod' | 'ib' | 'qm' | 'yahoo';
// Lookup table from provider name to its schema, for validating a single
// provider's config dynamically.
export const providerSchemas = {
  eod: eodProviderConfigSchema,
  ib: ibProviderConfigSchema,
  qm: qmProviderConfigSchema,
  yahoo: yahooProviderConfigSchema,
} as const;

View file

@ -0,0 +1,72 @@
import { z } from 'zod';
import {
baseAppSchema,
postgresConfigSchema,
mongodbConfigSchema,
questdbConfigSchema,
dragonflyConfigSchema
} from '@stock-bot/config';
import { providersSchema } from './providers.schema';
import { featuresSchema } from './features.schema';
/**
 * Stock trading application configuration schema.
 * Extends the shared base app schema with database connections, market-data
 * providers, feature flags, and per-service settings.
 */
export const stockAppSchema = baseAppSchema.extend({
  // Stock app uses all databases
  database: z.object({
    postgres: postgresConfigSchema,
    mongodb: mongodbConfigSchema,
    questdb: questdbConfigSchema,
    dragonfly: dragonflyConfigSchema,
  }),
  // Stock-specific providers
  providers: providersSchema,
  // Feature flags
  features: featuresSchema,
  // Service-specific configurations. Each service block is optional; default
  // ports (2001/2002/2003) mirror the values in config/default.json.
  services: z.object({
    dataIngestion: z.object({
      port: z.number().default(2001),
      workers: z.number().default(4),
      // Per-queue concurrency keyed by queue name (e.g. 'ceo', 'webshare').
      queues: z.record(z.object({
        concurrency: z.number().default(1),
      })).optional(),
      rateLimit: z.object({
        enabled: z.boolean().default(true),
        requestsPerSecond: z.number().default(10),
      }).optional(),
    }).optional(),
    dataPipeline: z.object({
      port: z.number().default(2002),
      workers: z.number().default(2),
      batchSize: z.number().default(1000),
      processingInterval: z.number().default(60000),
      queues: z.record(z.object({
        concurrency: z.number().default(1),
      })).optional(),
      syncOptions: z.object({
        maxRetries: z.number().default(3),
        retryDelay: z.number().default(5000),
        timeout: z.number().default(300000),
      }).optional(),
    }).optional(),
    webApi: z.object({
      port: z.number().default(2003),
      rateLimitPerMinute: z.number().default(60),
      cache: z.object({
        ttl: z.number().default(300),
        checkPeriod: z.number().default(60),
      }).optional(),
      cors: z.object({
        origins: z.array(z.string()).default(['http://localhost:3000']),
        credentials: z.boolean().default(true),
      }).optional(),
    }).optional(),
  }).optional(),
});
// Inferred TypeScript type for the fully validated configuration.
export type StockAppConfig = z.infer<typeof stockAppSchema>;

View file

@ -0,0 +1,15 @@
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src",
"composite": true,
"declaration": true,
"declarationMap": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts"],
"references": [
{ "path": "../../../libs/core/config" }
]
}

View file

@ -0,0 +1,85 @@
# Awilix DI Container Migration Guide
This guide explains how to use the new Awilix dependency injection container in the data-ingestion service.
## Overview
The Awilix container provides proper dependency injection for decoupled libraries, allowing them to be reused in other projects without stock-bot specific dependencies.
## Current Implementation
The data-ingestion service now uses a hybrid approach:
1. Awilix container for ProxyManager and other decoupled services
2. Legacy service factory for backward compatibility
## Usage Example
```typescript
// Create Awilix container
const awilixConfig = {
redis: {
host: config.database.dragonfly.host,
port: config.database.dragonfly.port,
db: config.database.dragonfly.db,
},
mongodb: {
uri: config.database.mongodb.uri,
database: config.database.mongodb.database,
},
postgres: {
host: config.database.postgres.host,
port: config.database.postgres.port,
database: config.database.postgres.database,
user: config.database.postgres.user,
password: config.database.postgres.password,
},
proxy: {
cachePrefix: 'proxy:',
ttl: 3600,
},
};
const container = createServiceContainer(awilixConfig);
await initializeServices(container);
// Access services from container
const proxyManager = container.resolve('proxyManager');
const cache = container.resolve('cache');
```
## Handler Integration
Handlers receive services through the enhanced service container:
```typescript
// Create service adapter with proxy from Awilix
const serviceContainerWithProxy = createServiceAdapter(services);
Object.defineProperty(serviceContainerWithProxy, 'proxy', {
get: () => container.resolve('proxyManager'),
enumerable: true,
configurable: true
});
// Handlers can now access proxy service
class MyHandler extends BaseHandler {
async myOperation() {
const proxy = this.proxy.getProxy();
// Use proxy...
}
}
```
## Benefits
1. **Decoupled Libraries**: Libraries no longer depend on @stock-bot/config
2. **Reusability**: Libraries can be used in other projects
3. **Testability**: Easy to mock dependencies for testing
4. **Type Safety**: Full TypeScript support with Awilix
## Next Steps
To fully migrate to Awilix:
1. Update HTTP library to accept dependencies via constructor
2. Update Queue library to accept Redis config via constructor
3. Create actual MongoDB, PostgreSQL, and QuestDB clients in the container
4. Remove legacy service factory once all services are migrated

View file

@ -0,0 +1,32 @@
{
"name": "@stock-bot/data-ingestion",
"version": "1.0.0",
"description": "Market data ingestion from multiple providers with proxy support and rate limiting",
"main": "dist/index.js",
"type": "module",
"scripts": {
"dev": "bun --watch src/index.ts",
"build": "bun build src/index.ts --outdir dist --target node --external chromium-bidi --external electron --external playwright --external playwright-core",
"start": "bun dist/index.js",
"test": "bun test",
"clean": "rm -rf dist"
},
"dependencies": {
"@stock-bot/cache": "*",
"@stock-bot/config": "*",
"@stock-bot/stock-config": "*",
"@stock-bot/di": "*",
"@stock-bot/handlers": "*",
"@stock-bot/logger": "*",
"@stock-bot/mongodb": "*",
"@stock-bot/postgres": "*",
"@stock-bot/questdb": "*",
"@stock-bot/queue": "*",
"@stock-bot/shutdown": "*",
"@stock-bot/utils": "*",
"hono": "^4.0.0"
},
"devDependencies": {
"typescript": "^5.0.0"
}
}

View file

@ -0,0 +1,3 @@
// Barrel file: re-exports every CEO action bound as an operation in ceo.handler.ts.
export { updateCeoChannels } from './update-ceo-channels.action';
export { updateUniqueSymbols } from './update-unique-symbols.action';
export { processIndividualSymbol } from './process-individual-symbol.action';

View file

@ -0,0 +1,117 @@
import { getRandomUserAgent } from '@stock-bot/utils';
import type { CeoHandler } from '../ceo.handler';
/**
 * Fetch one page of ceo.ca posts ("spiels") for a single channel, upsert the
 * posts and the symbol's short positions into MongoDB, then schedule a
 * follow-up job for the next page. Pagination stops when a page is empty.
 *
 * @param payload - { ceoId, symbol, timestamp? }; `timestamp` is the
 *   pagination cursor passed through the `until=` query parameter.
 * @returns page summary, null when the page was empty, or undefined when no
 *   proxy was available.
 */
export async function processIndividualSymbol(
  this: CeoHandler,
  payload: any,
  _context: any
): Promise<unknown> {
  const { ceoId, symbol, timestamp } = payload;
  const proxy = this.proxy?.getProxy();
  if (!proxy) {
    this.logger.warn('No proxy available for processing individual CEO symbol');
    return;
  }
  this.logger.debug('Processing individual CEO symbol', {
    ceoId,
    timestamp,
  });
  try {
    // Fetch detailed information for the individual symbol
    const response = await this.http.get(
      `https://api.ceo.ca/api/get_spiels?channel=${ceoId}&load_more=top` +
        (timestamp ? `&until=${timestamp}` : ''),
      {
        proxy: proxy,
        headers: {
          'User-Agent': getRandomUserAgent(),
        },
      }
    );
    if (!response.ok) {
      throw new Error(`Failed to fetch details for ceoId ${ceoId}: ${response.statusText}`);
    }
    const data = await response.json();
    const spielCount = data.spiels.length;
    if (spielCount === 0) {
      this.logger.warn(`No spiels found for ceoId ${ceoId}`);
      return null; // Empty page: pagination terminates here
    }
    const latestSpielTime = data.spiels[0]?.timestamp;
    // Map the API's snake_case fields onto our camelCase document shape.
    const posts = data.spiels.map((spiel: any) => ({
      ceoId,
      spiel: spiel.spiel,
      spielReplyToId: spiel.spiel_reply_to_id,
      spielReplyTo: spiel.spiel_reply_to,
      spielReplyToName: spiel.spiel_reply_to_name,
      spielReplyToEdited: spiel.spiel_reply_to_edited,
      userId: spiel.user_id,
      name: spiel.name,
      timestamp: spiel.timestamp,
      spielId: spiel.spiel_id,
      color: spiel.color,
      parentId: spiel.parent_id,
      publicId: spiel.public_id,
      parentChannel: spiel.parent_channel,
      parentTimestamp: spiel.parent_timestamp,
      votes: spiel.votes,
      editable: spiel.editable,
      edited: spiel.edited,
      featured: spiel.featured,
      verified: spiel.verified,
      fake: spiel.fake,
      bot: spiel.bot,
      voted: spiel.voted,
      flagged: spiel.flagged,
      ownSpiel: spiel.own_spiel,
      score: spiel.score,
      savedId: spiel.saved_id,
      savedTimestamp: spiel.saved_timestamp,
      poll: spiel.poll,
      votedInPoll: spiel.voted_in_poll,
    }));
    await this.mongodb.batchUpsert('ceoPosts', posts, ['spielId']);
    this.logger.info(`Fetched ${spielCount} spiels for ceoId ${ceoId}`);
    // Update Shorts
    const shortRes = await this.http.get(
      `https://api.ceo.ca/api/short_positions/one?symbol=${symbol}`,
      {
        proxy: proxy,
        headers: {
          'User-Agent': getRandomUserAgent(),
        },
      }
    );
    if (shortRes.ok) {
      const shortData = await shortRes.json();
      if (shortData && shortData.positions) {
        await this.mongodb.batchUpsert('ceoShorts', shortData.positions, ['id']);
      }
    }
    // Schedule the next page unconditionally (previously this was gated on the
    // shorts request succeeding, silently halting pagination on a failure) and
    // carry `symbol` forward — the old payload dropped it, so follow-up runs
    // hit the shorts endpoint with symbol=undefined.
    await this.scheduleOperation('process-individual-symbol', {
      ceoId: ceoId,
      symbol: symbol,
      timestamp: latestSpielTime,
    });
    this.logger.info(
      `Successfully processed channel ${ceoId} and added channel ${ceoId} at timestamp ${latestSpielTime}`
    );
    return { ceoId, spielCount, timestamp };
  } catch (error) {
    this.logger.error('Failed to process individual symbol', {
      error,
      ceoId,
      timestamp,
    });
    throw error;
  }
}

View file

@ -0,0 +1,72 @@
import { getRandomUserAgent } from '@stock-bot/utils';
import type { CeoHandler } from '../ceo.handler';
/**
 * Fetch one page of the ceo.ca companies listing and upsert the symbols and
 * exchanges found there. The page-1 run fans out jobs for all remaining pages.
 *
 * @param payload - page number to fetch; defaults to 1 when absent.
 * @returns { page, totalPages }, or undefined when no proxy was available.
 */
export async function updateCeoChannels(
  this: CeoHandler,
  payload: number | undefined
): Promise<unknown> {
  const proxy = this.proxy?.getProxy();
  if (!proxy) {
    this.logger.warn('No proxy available for CEO channels update');
    return;
  }
  const page = payload ?? 1;
  this.logger.info(`Fetching CEO channels for page ${page} with proxy ${proxy}`);
  const response = await this.http.get(
    'https://api.ceo.ca/api/home?exchange=all&sort_by=symbol&sector=All&tab=companies&page=' + page,
    {
      proxy: proxy,
      headers: {
        'User-Agent': getRandomUserAgent(),
      },
    }
  );
  // Surface HTTP failures explicitly instead of crashing while reading an
  // unexpected JSON body (matches the response.ok handling in the sibling
  // process-individual-symbol action).
  if (!response.ok) {
    throw new Error(`Failed to fetch CEO channels page ${page}: ${response.statusText}`);
  }
  const results = await response.json();
  const channels = results.channel_categories[0].channels;
  const totalChannels = results.channel_categories[0].total_channels;
  // Guard the empty page: the old code divided by channels.length, which
  // yields Infinity totalPages (and an unbounded scheduling loop) when the
  // API returns no channels.
  if (!channels || channels.length === 0) {
    this.logger.warn(`No CEO channels returned for page ${page}`);
    return { page, totalPages: 0 };
  }
  const totalPages = Math.ceil(totalChannels / channels.length);
  const exchanges: { exchange: string; countryCode: string }[] = [];
  const symbols = channels.map((channel: any) => {
    // Collect each exchange once as it is first seen.
    if (!exchanges.find((e: any) => e.exchange === channel.exchange)) {
      exchanges.push({
        exchange: channel.exchange,
        // countryCode is hardcoded — TODO confirm all ceo.ca venues are Canadian.
        countryCode: 'CA',
      });
    }
    const details = channel.company_details || {};
    return {
      symbol: channel.symbol,
      exchange: channel.exchange,
      name: channel.title,
      type: channel.type,
      ceoId: channel.channel,
      marketCap: details.market_cap,
      volumeRatio: details.volume_ratio,
      avgVolume: details.avg_volume,
      stockType: details.stock_type,
      issueType: details.issue_type,
      sharesOutstanding: details.shares_outstanding,
      float: details.float,
    };
  });
  await this.mongodb.batchUpsert('ceoSymbols', symbols, ['symbol', 'exchange']);
  await this.mongodb.batchUpsert('ceoExchanges', exchanges, ['exchange']);
  // Only the page-1 run schedules the rest, so re-running a single page job
  // never re-triggers the whole crawl.
  if (page === 1) {
    for (let i = 2; i <= totalPages; i++) {
      this.logger.info(`Scheduling page ${i} of ${totalPages} for CEO channels`);
      await this.scheduleOperation('update-ceo-channels', i);
    }
  }
  this.logger.info(`Fetched CEO channels for page ${page}/${totalPages}`);
  return { page, totalPages };
}

View file

@ -0,0 +1,71 @@
import type { CeoHandler } from '../ceo.handler';
/**
 * Collect the latest record for every distinct ceoId in `ceoSymbols` and
 * schedule a `process-individual-symbol` job for each. A run summary is
 * cached under `unique-symbols-last-run` for monitoring.
 *
 * @returns run summary with counts of unique ids, records, and scheduled jobs.
 * @throws rethrows any MongoDB/queue error after logging it.
 */
export async function updateUniqueSymbols(
  this: CeoHandler,
  _payload: unknown,
  _context: unknown
): Promise<unknown> {
  this.logger.info('Starting update to get unique CEO symbols by ceoId');
  try {
    // Get unique ceoId values from the ceoSymbols collection
    const uniqueCeoIds = await this.mongodb.collection('ceoSymbols').distinct('ceoId');
    this.logger.info(`Found ${uniqueCeoIds.length} unique CEO IDs`);
    // Fetch the most recent record per ceoId.
    // NOTE(review): this is an N+1 query (one findOne per id); a single
    // aggregation with $sort + $group would do it in one round trip if this
    // collection grows large.
    const uniqueSymbols = [];
    for (const ceoId of uniqueCeoIds) {
      const symbol = await this.mongodb
        .collection('ceoSymbols')
        .findOne({ ceoId }, { sort: { _id: -1 } }); // latest by insertion order
      if (symbol) {
        uniqueSymbols.push(symbol);
      }
    }
    this.logger.info(`Retrieved ${uniqueSymbols.length} unique symbol records`);
    // Schedule individual jobs for each unique symbol
    let scheduledJobs = 0;
    for (const symbol of uniqueSymbols) {
      await this.scheduleOperation('process-individual-symbol', {
        ceoId: symbol.ceoId,
        symbol: symbol.symbol,
      });
      scheduledJobs++;
      // Log progress every 10 jobs. (The previous comment promised a "small
      // delay" here, but none was ever implemented — only this debug log.)
      if (scheduledJobs % 10 === 0) {
        this.logger.debug(`Scheduled ${scheduledJobs} jobs so far`);
      }
    }
    this.logger.info(`Successfully scheduled ${scheduledJobs} individual symbol update jobs`);
    // Cache the results for monitoring
    await this.cacheSet(
      'unique-symbols-last-run',
      {
        timestamp: new Date().toISOString(),
        totalUniqueIds: uniqueCeoIds.length,
        totalRecords: uniqueSymbols.length,
        scheduledJobs,
      },
      1800
    ); // Cache for 30 minutes
    return {
      success: true,
      uniqueCeoIds: uniqueCeoIds.length,
      uniqueRecords: uniqueSymbols.length,
      scheduledJobs,
      timestamp: new Date().toISOString(),
    };
  } catch (error) {
    this.logger.error('Failed to update unique CEO symbols', { error });
    throw error;
  }
}

View file

@ -0,0 +1,34 @@
import {
BaseHandler,
Handler,
Operation,
ScheduledOperation,
type IServiceContainer,
} from '@stock-bot/handlers';
import { processIndividualSymbol, updateCeoChannels, updateUniqueSymbols } from './actions';
@Handler('ceo')
// @Disabled()
export class CeoHandler extends BaseHandler {
  constructor(services: IServiceContainer) {
    super(services); // Handler name read from @Handler decorator
  }
  // Crawls the ceo.ca companies listing to upsert symbols and exchanges.
  // NOTE(review): cron '0 */15 * * *' fires at minute 0 of every 15th HOUR
  // (00:00 and 15:00), not every 15 minutes — confirm the intended schedule.
  @ScheduledOperation('update-ceo-channels', '0 */15 * * *', {
    priority: 7,
    immediately: false,
    description: 'Get all CEO symbols and exchanges',
  })
  updateCeoChannels = updateCeoChannels;
  // Monthly fan-out: finds distinct ceoIds and schedules one
  // process-individual-symbol job per symbol.
  @Operation('update-unique-symbols')
  @ScheduledOperation('process-unique-symbols', '0 0 1 * *', {
    priority: 5,
    immediately: false,
    description: 'Process unique CEO symbols and schedule individual jobs',
  })
  updateUniqueSymbols = updateUniqueSymbols;
  // Paginated per-channel post/shorts ingestion; reschedules itself until a
  // page comes back empty.
  @Operation('process-individual-symbol')
  processIndividualSymbol = processIndividualSymbol;
}

View file

@ -0,0 +1,94 @@
/**
* Example Handler - Demonstrates ergonomic handler patterns
* Shows inline operations, service helpers, and scheduled operations
*/
import {
BaseHandler,
Handler,
Operation,
ScheduledOperation,
type ExecutionContext,
type IServiceContainer,
} from '@stock-bot/handlers';
@Handler('example')
export class ExampleHandler extends BaseHandler {
  constructor(services: IServiceContainer) {
    super(services); // handler name comes from the @Handler decorator
  }
  /**
   * Simple inline operation - no separate action file needed.
   * Returns item counts plus whether a previous total was found in cache.
   */
  @Operation('get-stats')
  async getStats(): Promise<{ total: number; active: number; cached: boolean }> {
    // Use collection helper for cleaner MongoDB access
    const total = await this.collection('items').countDocuments();
    const active = await this.collection('items').countDocuments({ status: 'active' });
    // Use cache helpers with automatic prefixing.
    // NOTE(review): `cached !== null` assumes cacheGet returns null (not
    // undefined) on a miss — confirm against the cache library.
    const cached = await this.cacheGet<number>('last-total');
    await this.cacheSet('last-total', total, 300); // 5 minutes
    // Use log helper with automatic handler context
    this.log('info', 'Stats retrieved', { total, active });
    return { total, active, cached: cached !== null };
  }
  /**
   * Scheduled operation using combined decorator.
   * Runs daily at 02:00; deletes items older than 30 days, then queues a
   * follow-up report job.
   */
  @ScheduledOperation('cleanup-old-items', '0 2 * * *', {
    priority: 5,
    description: 'Clean up items older than 30 days',
  })
  async cleanupOldItems(): Promise<{ deleted: number }> {
    const thirtyDaysAgo = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
    const result = await this.collection('items').deleteMany({
      createdAt: { $lt: thirtyDaysAgo },
    });
    this.log('info', 'Cleanup completed', { deleted: result.deletedCount });
    // Schedule a follow-up task
    await this.scheduleIn('generate-report', { type: 'cleanup' }, 60); // 1 minute
    return { deleted: result.deletedCount };
  }
  /**
   * Operation that uses proxy service.
   * NOTE(review): reads `response.data` directly, while the CEO/IB actions
   * call `await response.json()` — confirm the HTTP client's response shape.
   */
  @Operation('fetch-external-data')
  async fetchExternalData(input: { url: string }): Promise<{ data: any }> {
    const proxyUrl = this.proxy.getProxy();
    if (!proxyUrl) {
      throw new Error('No proxy available');
    }
    // Use HTTP client with proxy
    const response = await this.http.get(input.url, {
      proxy: proxyUrl,
      timeout: 10000,
    });
    // Cache the result
    await this.cacheSet(`external:${input.url}`, response.data, 3600);
    return { data: response.data };
  }
  /**
   * Complex operation that still uses action file.
   * NOTE(review): `context` is accepted but never forwarded to the action —
   * confirm that is intentional.
   */
  @Operation('process-batch')
  async processBatch(input: any, context: ExecutionContext): Promise<unknown> {
    // For complex operations, still use action files
    const { processBatch } = await import('./actions/batch.action');
    return processBatch(this, input);
  }
}

View file

@ -0,0 +1,38 @@
import type { IbHandler } from '../ib.handler';
/**
 * Orchestrate a full Interactive Brokers metadata refresh: obtain session
 * headers, then fetch exchanges followed by symbols.
 * Never throws — failures are reported as `{ success: false, error }`.
 */
export async function fetchExchangesAndSymbols(this: IbHandler): Promise<unknown> {
  this.logger.info('Starting IB exchanges and symbols fetch job');
  // Counts are reported defensively: either fetch may return null.
  const countOf = (items: unknown[] | null | undefined) => items?.length || 0;
  try {
    // A browser-derived session is required before any API call.
    const sessionHeaders = await this.fetchSession();
    if (!sessionHeaders) {
      this.logger.error('Failed to get session headers for IB job');
      return { success: false, error: 'No session headers' };
    }
    this.logger.info('Session headers obtained, fetching exchanges...');
    const exchanges = await this.fetchExchanges();
    this.logger.info('Fetched exchanges from IB', { count: countOf(exchanges) });
    this.logger.info('Fetching symbols...');
    const symbols = await this.fetchSymbols();
    this.logger.info('Fetched symbols from IB', { count: countOf(symbols) });
    return {
      success: true,
      exchangesCount: countOf(exchanges),
      symbolsCount: countOf(symbols),
    };
  } catch (error) {
    this.logger.error('Failed to fetch IB exchanges and symbols', { error });
    return {
      success: false,
      error: error instanceof Error ? error.message : 'Unknown error',
    };
  }
}

View file

@ -0,0 +1,66 @@
import type { IbHandler } from '../ib.handler';
import { IB_CONFIG } from '../shared/config';
/**
 * Fetch the Interactive Brokers exchange list and persist it to MongoDB.
 *
 * @returns the exchange records, or null on any failure (never throws).
 */
export async function fetchExchanges(this: IbHandler): Promise<unknown[] | null> {
  try {
    // Session headers come from the browser-driven session flow.
    const sessionHeaders = await this.fetchSession();
    if (!sessionHeaders) {
      throw new Error('Failed to get session headers');
    }
    this.logger.info('🔍 Fetching exchanges with session headers...');
    const url = IB_CONFIG.BASE_URL + IB_CONFIG.EXCHANGE_API;
    // Browser-like headers layered on top of the captured session headers.
    const headers = {
      ...sessionHeaders,
      Accept: 'application/json, text/plain, */*',
      'Accept-Language': 'en-US,en;q=0.9',
      'Cache-Control': 'no-cache',
      Pragma: 'no-cache',
      'Sec-Fetch-Dest': 'empty',
      'Sec-Fetch-Mode': 'cors',
      'Sec-Fetch-Site': 'same-origin',
      'X-Requested-With': 'XMLHttpRequest',
    };
    this.logger.info('📤 Making request to exchange API...', {
      url,
      headerCount: Object.keys(headers).length,
    });
    // `proxy` here is a runtime fetch extension (e.g. Bun) — TODO confirm the
    // target runtime supports it.
    const response = await fetch(url, {
      method: 'GET',
      headers,
      proxy: IB_CONFIG.DEFAULT_PROXY,
    });
    if (!response.ok) {
      this.logger.error('❌ Exchange API request failed', {
        status: response.status,
        statusText: response.statusText,
      });
      return null;
    }
    const payload = await response.json();
    const exchanges = payload?.exchanges || [];
    this.logger.info('✅ Exchange data fetched successfully');
    this.logger.info('Saving IB exchanges to MongoDB...');
    await this.mongodb.batchUpsert('ibExchanges', exchanges, ['id', 'country_code']);
    this.logger.info('✅ Exchange IB data saved to MongoDB:', {
      count: exchanges.length,
    });
    return exchanges;
  } catch (error) {
    this.logger.error('❌ Failed to fetch exchanges', { error });
    return null;
  }
}

View file

@ -0,0 +1,83 @@
import { Browser } from '@stock-bot/browser';
import type { IbHandler } from '../ib.handler';
import { IB_CONFIG } from '../shared/config';
/**
 * Open the IB products page in a headless, proxied browser and capture the
 * request headers of the summary XHR the page fires. Those headers carry the
 * session state the REST endpoints require.
 *
 * @returns the captured request headers, or undefined on timeout/failure.
 */
export async function fetchSession(this: IbHandler): Promise<Record<string, string> | undefined> {
  try {
    await Browser.initialize({
      headless: true,
      timeout: IB_CONFIG.BROWSER_TIMEOUT,
      blockResources: false,
    });
    this.logger.info('✅ Browser initialized');
    const { page } = await Browser.createPageWithProxy(
      IB_CONFIG.BASE_URL + IB_CONFIG.PRODUCTS_PAGE,
      IB_CONFIG.DEFAULT_PROXY
    );
    this.logger.info('✅ Page created with proxy');
    const headersPromise = new Promise<Record<string, string> | undefined>(resolve => {
      let resolved = false;
      // Timeout fallback so we never hang if the summary XHR is not observed.
      const timer = setTimeout(() => {
        if (!resolved) {
          resolved = true;
          this.logger.warn('Timeout waiting for headers');
          resolve(undefined);
        }
      }, IB_CONFIG.HEADERS_TIMEOUT);
      page.onNetworkEvent(event => {
        if (
          event.type === 'request' &&
          event.url.includes('/webrest/search/product-types/summary')
        ) {
          // Fix: mark resolution and cancel the fallback timer. Previously
          // `resolved` was never set on success, so the timeout still fired
          // and logged a spurious "Timeout waiting for headers" warning.
          if (resolved) return;
          resolved = true;
          clearTimeout(timer);
          try {
            resolve(event.headers);
          } catch (e) {
            resolve(undefined);
            this.logger.debug('Raw Summary Response error', { error: (e as Error).message });
          }
        }
      });
    });
    this.logger.info('⏳ Waiting for page load...');
    await page.waitForLoadState('domcontentloaded', { timeout: IB_CONFIG.PAGE_LOAD_TIMEOUT });
    this.logger.info('✅ Page loaded');
    // Products tab
    this.logger.info('🔍 Looking for Products tab...');
    const productsTab = page.locator('#productSearchTab[role="tab"][href="#products"]');
    await productsTab.waitFor({ timeout: IB_CONFIG.ELEMENT_TIMEOUT });
    this.logger.info('✅ Found Products tab');
    this.logger.info('🖱️ Clicking Products tab...');
    await productsTab.click();
    this.logger.info('✅ Products tab clicked');
    // "New Products Only" filter — clicking it triggers the summary XHR.
    this.logger.info('🔍 Looking for "New Products Only" radio button...');
    const radioButton = page.locator('span.checkbox-text:has-text("New Products Only")');
    await radioButton.waitFor({ timeout: IB_CONFIG.ELEMENT_TIMEOUT });
    this.logger.info(`🎯 Found "New Products Only" radio button`);
    await radioButton.first().click();
    this.logger.info('✅ "New Products Only" radio button clicked');
    // Wait for and return headers immediately when captured
    this.logger.info('⏳ Waiting for headers to be captured...');
    const headers = await headersPromise;
    page.close();
    if (headers) {
      this.logger.info('✅ Headers captured successfully');
    } else {
      this.logger.warn('⚠️ No headers were captured');
    }
    return headers;
  } catch (error) {
    this.logger.error('Failed to fetch IB symbol summary', { error });
    return;
  }
}

View file

@ -0,0 +1,117 @@
import type { IbHandler } from '../ib.handler';
import { IB_CONFIG } from '../shared/config';
/**
 * Fetch the full IB symbol universe (stocks for the configured countries),
 * page by page, and persist it to MongoDB.
 *
 * @returns the list of symbol documents, or null on any failure.
 */
export async function fetchSymbols(this: IbHandler): Promise<unknown[] | null> {
  try {
    // First get session headers (opens a headless browser; see fetchSession).
    const sessionHeaders = await this.fetchSession();
    if (!sessionHeaders) {
      throw new Error('Failed to get session headers');
    }
    this.logger.info('🔍 Fetching symbols with session headers...');
    // Session headers plus browser-like extras.
    const requestHeaders = {
      ...sessionHeaders,
      Accept: 'application/json, text/plain, */*',
      'Accept-Language': 'en-US,en;q=0.9',
      'Cache-Control': 'no-cache',
      Pragma: 'no-cache',
      'Sec-Fetch-Dest': 'empty',
      'Sec-Fetch-Mode': 'cors',
      'Sec-Fetch-Site': 'same-origin',
      'X-Requested-With': 'XMLHttpRequest',
    };
    const requestBody = {
      domain: 'com',
      newProduct: 'all',
      pageNumber: 1,
      pageSize: 100,
      productCountry: IB_CONFIG.PRODUCT_COUNTRIES,
      productSymbol: '',
      productType: IB_CONFIG.PRODUCT_TYPES,
      sortDirection: 'asc',
      sortField: 'symbol',
    };
    // Get Summary — a small first request used only for its totalCount.
    const summaryResponse = await fetch(IB_CONFIG.BASE_URL + IB_CONFIG.SUMMARY_API, {
      method: 'POST',
      headers: requestHeaders,
      proxy: IB_CONFIG.DEFAULT_PROXY,
      body: JSON.stringify(requestBody),
    });
    if (!summaryResponse.ok) {
      this.logger.error('❌ Summary API request failed', {
        status: summaryResponse.status,
        statusText: summaryResponse.statusText,
      });
      return null;
    }
    const summaryData = await summaryResponse.json();
    // Guard the array access: an empty/unexpected summary payload previously
    // threw a TypeError here and surfaced only as the generic catch below.
    const totalCount = summaryData?.[0]?.totalCount ?? 0;
    this.logger.info('✅ IB Summary data fetched successfully', { totalCount });
    const symbols: unknown[] = [];
    requestBody.pageSize = IB_CONFIG.PAGE_SIZE;
    const pageCount = Math.ceil(totalCount / IB_CONFIG.PAGE_SIZE) || 0;
    this.logger.info('Fetching Symbols for IB', { pageCount });
    // Fire all page requests in parallel. JSON.stringify runs synchronously in
    // each iteration, so every request captures its own pageNumber even though
    // the requestBody object is shared and mutated.
    const symbolPromises = [];
    for (let page = 1; page <= pageCount; page++) {
      requestBody.pageNumber = page;
      symbolPromises.push(
        fetch(IB_CONFIG.BASE_URL + IB_CONFIG.PRODUCTS_API, {
          method: 'POST',
          headers: requestHeaders,
          proxy: IB_CONFIG.DEFAULT_PROXY,
          body: JSON.stringify(requestBody),
        })
      );
    }
    const responses = await Promise.all(symbolPromises);
    for (const response of responses) {
      if (!response.ok) {
        this.logger.error('❌ Symbols API request failed', {
          status: response.status,
          statusText: response.statusText,
        });
        return null;
      }
      const data = await response.json();
      const symJson = data?.products || [];
      if (symJson.length > 0) {
        symbols.push(...symJson);
      } else {
        this.logger.warn('⚠️ No symbols found in response');
      }
    }
    if (symbols.length === 0) {
      this.logger.warn('⚠️ No symbols fetched from IB');
      return null;
    }
    this.logger.info('✅ IB symbols fetched successfully, saving to DB...', {
      totalSymbols: symbols.length,
    });
    // NOTE(review): this collection is snake_case ('ib_symbols') while the
    // exchange action writes camelCase 'ibExchanges' — confirm which naming
    // convention is intended before unifying.
    await this.mongodb.batchUpsert('ib_symbols', symbols, ['symbol', 'exchangeId']);
    this.logger.info('Saved IB symbols to DB', {
      totalSymbols: symbols.length,
    });
    return symbols;
  } catch (error) {
    this.logger.error('❌ Failed to fetch symbols', { error });
    return null;
  }
}

View file

@ -0,0 +1,5 @@
/**
 * Barrel file for the IB handler actions (bound as operations in ib.handler).
 */
export { fetchSession } from './fetch-session.action';
export { fetchExchanges } from './fetch-exchanges.action';
export { fetchSymbols } from './fetch-symbols.action';
export { fetchExchangesAndSymbols } from './fetch-exchanges-and-symbols.action';

View file

@ -0,0 +1,33 @@
import {
BaseHandler,
Handler,
Operation,
ScheduledOperation,
type IServiceContainer,
} from '@stock-bot/handlers';
import { fetchExchanges, fetchExchangesAndSymbols, fetchSession, fetchSymbols } from './actions';
/**
 * Interactive Brokers handler: browser-based session capture plus REST
 * fetches for exchanges and symbols. The implementations live in ./actions
 * and are bound here as operations.
 */
@Handler('ib')
export class IbHandler extends BaseHandler {
  constructor(services: IServiceContainer) {
    super(services);
  }
  // Capture session headers via a headless browser visit.
  @Operation('fetch-session')
  fetchSession = fetchSession;
  // Fetch and persist the exchange list.
  @Operation('fetch-exchanges')
  fetchExchanges = fetchExchanges;
  // Fetch and persist the symbol universe.
  @Operation('fetch-symbols')
  fetchSymbols = fetchSymbols;
  // Combined job, scheduled weekly (midnight Sunday).
  @Operation('ib-exchanges-and-symbols')
  @ScheduledOperation('ib-exchanges-and-symbols', '0 0 * * 0', {
    priority: 5,
    description: 'Fetch and update IB exchanges and symbols data',
    immediately: false,
  })
  fetchExchangesAndSymbols = fetchExchangesAndSymbols;
}

View file

@ -0,0 +1,24 @@
/**
 * Interactive Brokers Configuration Constants
 *
 * URLs, timeouts and request parameters shared by the IB actions.
 */
export const IB_CONFIG = {
  BASE_URL: 'https://www.interactivebrokers.com',
  PRODUCTS_PAGE: '/en/trading/products-exchanges.php#/', // visited to capture session headers
  EXCHANGE_API: '/webrest/exchanges',
  SUMMARY_API: '/webrest/search/product-types/summary',
  PRODUCTS_API: '/webrest/search/products-by-filters',
  // Browser configuration (all values in milliseconds)
  BROWSER_TIMEOUT: 10000,
  PAGE_LOAD_TIMEOUT: 20000,
  ELEMENT_TIMEOUT: 5000,
  HEADERS_TIMEOUT: 30000,
  // API configuration
  // SECURITY NOTE(review): real proxy credentials are hard-coded in source.
  // Move them to configuration/environment and rotate the credential before
  // this repository is shared or published.
  DEFAULT_PROXY: 'http://doimvbnb-US-rotate:w5fpiwrb9895@p.webshare.io:80',
  PAGE_SIZE: 500, // page size used when paging the products API
  PRODUCT_COUNTRIES: ['CA', 'US'],
  PRODUCT_TYPES: ['STK'], // stocks only
};

View file

@ -0,0 +1,60 @@
/**
* Handler auto-registration
* Automatically discovers and registers all handlers
*/
import type { IServiceContainer } from '@stock-bot/handlers';
import { autoRegisterHandlers } from '@stock-bot/handlers';
import { getLogger } from '@stock-bot/logger';
// Import handlers for bundling (ensures they're included in the build)
import './ceo/ceo.handler';
import './ib/ib.handler';
import './proxy/proxy.handler';
import './qm/qm.handler';
import './webshare/webshare.handler';
// Add more handler imports as needed
const logger = getLogger('handler-init');
/**
 * Initialize and register all handlers automatically.
 * Scans this directory for *.handler.* modules; on failure falls back to the
 * manual registration stub.
 */
export async function initializeAllHandlers(serviceContainer: IServiceContainer): Promise<void> {
  try {
    // Auto-register every handler found in this directory.
    const outcome = await autoRegisterHandlers(__dirname, serviceContainer, {
      pattern: '.handler.',
      exclude: ['test', 'spec'],
      dryRun: false,
    });
    logger.info('Handler auto-registration complete', {
      registered: outcome.registered,
      failed: outcome.failed,
    });
    if (outcome.failed.length > 0) {
      logger.error('Some handlers failed to register', { failed: outcome.failed });
    }
  } catch (error) {
    logger.error('Handler auto-registration failed', { error });
    // Fall back to manual registration
    await manualHandlerRegistration(serviceContainer);
  }
}
/**
* Manual fallback registration
*/
async function manualHandlerRegistration(_serviceContainer: any): Promise<void> {
logger.warn('Falling back to manual handler registration');
try {
logger.info('Manual handler registration complete');
} catch (error) {
logger.error('Manual handler registration failed', { error });
throw error;
}
}

View file

@ -0,0 +1,176 @@
/**
* Proxy Check Operations - Checking proxy functionality
*/
import { OperationContext } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
import type { ProxyInfo } from '@stock-bot/proxy';
import { fetch } from '@stock-bot/utils';
import { PROXY_CONFIG } from '../shared/config';
/**
 * Check if a proxy is working by routing a request to the detection endpoint
 * through it. A proxy only counts as working when the request succeeds AND
 * the response does not echo our own egress IP (CHECK_IP) — a response
 * containing CHECK_IP means the proxy did not actually relay the traffic.
 *
 * @param proxy proxy to test (credentials used when present)
 * @returns the proxy enriched with isWorking/responseTime/lastChecked (and
 *          error on failure); never throws.
 */
export async function checkProxy(proxy: ProxyInfo): Promise<ProxyInfo> {
  const ctx = {
    logger: getLogger('proxy-check'),
    resolve: <T>(_name: string) => {
      throw new Error(`Service container not available for proxy operations`);
    },
  } as any;
  ctx.logger.debug(`Checking Proxy:`, {
    protocol: proxy.protocol,
    host: proxy.host,
    port: proxy.port,
  });
  // Measure the round trip so updateProxyInCache can maintain its rolling
  // average (previously responseTime was never set, leaving that logic dead).
  const startedAt = Date.now();
  try {
    // Test the proxy using fetch with proxy support
    const proxyUrl =
      proxy.username && proxy.password
        ? `${proxy.protocol}://${encodeURIComponent(proxy.username)}:${encodeURIComponent(proxy.password)}@${proxy.host}:${proxy.port}`
        : `${proxy.protocol}://${proxy.host}:${proxy.port}`;
    const response = await fetch(PROXY_CONFIG.CHECK_URL, {
      proxy: proxyUrl,
      signal: AbortSignal.timeout(PROXY_CONFIG.CHECK_TIMEOUT),
      logger: ctx.logger,
    } as any);
    const data = await response.text();
    // Fix: include the IP-leak check in isWorking. Previously isWorking was
    // response.ok alone, so a transparent/non-relaying proxy was returned as
    // "working" while the cache simultaneously recorded a failure.
    const isWorking = response.ok && !data.includes(PROXY_CONFIG.CHECK_IP);
    const result: ProxyInfo = {
      ...proxy,
      isWorking,
      responseTime: Date.now() - startedAt,
      lastChecked: new Date(),
    };
    await updateProxyInCache(result, isWorking, ctx);
    if (proxy.source) {
      updateProxyStats(proxy.source, isWorking, ctx);
    }
    ctx.logger.debug('Proxy check completed', {
      host: proxy.host,
      port: proxy.port,
      isWorking,
    });
    return result;
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    const result: ProxyInfo = {
      ...proxy,
      isWorking: false,
      error: errorMessage,
      lastChecked: new Date(),
    };
    // Update cache for failed proxy (increment total, don't update TTL)
    await updateProxyInCache(result, false, ctx);
    if (proxy.source) {
      updateProxyStats(proxy.source, false, ctx);
    }
    ctx.logger.debug('Proxy check failed', {
      host: proxy.host,
      port: proxy.port,
      error: errorMessage,
    });
    return result;
  }
}
/**
 * Update proxy data in cache with working/total stats and average response time.
 *
 * NOTE(review): the cache read/write itself is stubbed out until operations
 * receive a service container, so `existing` is always null here and the
 * merge/TTL logic below currently only shapes the debug log output.
 */
async function updateProxyInCache(
  proxy: ProxyInfo,
  isWorking: boolean,
  ctx: OperationContext
): Promise<void> {
  const _cacheKey = `${PROXY_CONFIG.CACHE_KEY}:${proxy.protocol}://${proxy.host}:${proxy.port}`;
  try {
    // For now, skip cache operations without service container
    // TODO: Pass service container to operations
    const existing: ProxyInfo | null = null;
    // For failed proxies, only update if they already exist.
    // (With the stub above, `existing` is always null, so every failed check
    // short-circuits here.)
    if (!isWorking && !existing) {
      ctx.logger.debug('Proxy not in cache, skipping failed update', {
        proxy: `${proxy.host}:${proxy.port}`,
      });
      return;
    }
    // Calculate new average response time if we have a response time
    let newAverageResponseTime = existing?.averageResponseTime;
    if (proxy.responseTime !== undefined) {
      const existingAvg = existing?.averageResponseTime || 0;
      const existingTotal = existing?.total || 0;
      // Calculate weighted average: (existing_avg * existing_count + new_response) / (existing_count + 1)
      newAverageResponseTime =
        existingTotal > 0
          ? (existingAvg * existingTotal + proxy.responseTime) / (existingTotal + 1)
          : proxy.responseTime;
    }
    // Build updated proxy data (latest check wins over cached fields)
    const updated = {
      ...existing,
      ...proxy, // Keep latest proxy info
      total: (existing?.total || 0) + 1,
      working: isWorking ? (existing?.working || 0) + 1 : existing?.working || 0,
      isWorking,
      lastChecked: new Date(),
      // Add firstSeen only for new entries
      ...(existing ? {} : { firstSeen: new Date() }),
      // Update average response time if we calculated a new one
      ...(newAverageResponseTime !== undefined
        ? { averageResponseTime: newAverageResponseTime }
        : {}),
    };
    // Calculate success rate (percentage of checks that succeeded)
    updated.successRate = updated.total > 0 ? (updated.working / updated.total) * 100 : 0;
    // Save to cache: reset TTL for working proxies, keep existing TTL for failed ones
    const _cacheOptions = isWorking ? { ttl: PROXY_CONFIG.CACHE_TTL } : undefined;
    // Skip cache operations without service container
    // TODO: Pass service container to operations
    ctx.logger.debug(`Updated ${isWorking ? 'working' : 'failed'} proxy in cache`, {
      proxy: `${proxy.host}:${proxy.port}`,
      working: updated.working,
      total: updated.total,
      successRate: updated.successRate.toFixed(1) + '%',
      avgResponseTime: updated.averageResponseTime
        ? `${updated.averageResponseTime.toFixed(0)}ms`
        : 'N/A',
    });
  } catch (error) {
    ctx.logger.error('Failed to update proxy in cache', {
      proxy: `${proxy.host}:${proxy.port}`,
      error: error instanceof Error ? error.message : String(error),
    });
  }
}
/**
 * Record the outcome of a proxy check for its source.
 * Stats are now handled by the global ProxyManager; this only emits a debug log.
 */
function updateProxyStats(sourceId: string, success: boolean, ctx: OperationContext) {
  // TODO: Integrate with global ProxyManager stats if needed
  ctx.logger.debug('Proxy check result', { sourceId, success });
}

View file

@ -0,0 +1,104 @@
/**
* Proxy Fetch Operations - Fetching proxies from sources
*/
import type { ProxyInfo } from '@stock-bot/proxy';
import { OperationContext } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
import { fetch } from '@stock-bot/utils';
import { PROXY_CONFIG } from '../shared/config';
import type { ProxySource } from '../shared/types';
/**
 * Pull proxies from every configured source in parallel and dedupe the result.
 */
export async function fetchProxiesFromSources(): Promise<ProxyInfo[]> {
  const ctx = {
    logger: getLogger('proxy-fetch')
  } as any;
  ctx.logger.info('Starting proxy fetch from sources');
  // Each per-source fetch handles its own failures and resolves to [].
  const perSource = await Promise.all(
    PROXY_CONFIG.PROXY_SOURCES.map(source => fetchProxiesFromSource(source, ctx))
  );
  const deduped = removeDuplicateProxies(perSource.flat());
  ctx.logger.info('Fetched proxies from all sources', { total: deduped.length });
  return deduped;
}
/**
 * Fetch and parse a single proxy source list.
 *
 * Lines are expected as "host:port" or "host:port:user:pass"; '#' comment
 * lines are skipped. Returns [] on any fetch failure.
 */
export async function fetchProxiesFromSource(source: ProxySource, ctx?: OperationContext): Promise<ProxyInfo[]> {
  if (!ctx) {
    ctx = OperationContext.create('proxy', 'fetch-source');
  }
  const allProxies: ProxyInfo[] = [];
  try {
    ctx.logger.info(`Fetching proxies from ${source.url}`);
    const response = await fetch(source.url, {
      signal: AbortSignal.timeout(10000),
      logger: ctx.logger
    } as any);
    if (!response.ok) {
      ctx.logger.warn(`Failed to fetch from ${source.url}: ${response.status}`);
      return [];
    }
    const text = await response.text();
    const lines = text.split('\n').filter((line: string) => line.trim());
    for (const line of lines) {
      const trimmed = cleanProxyUrl(line.trim());
      if (!trimmed || trimmed.startsWith('#')) {
        continue;
      }
      // Parse formats like "host:port" or "host:port:user:pass"
      const parts = trimmed.split(':');
      if (parts.length >= 2) {
        const port = parseInt(parts[1], 10); // explicit radix
        const proxy: ProxyInfo = {
          source: source.id,
          protocol: source.protocol as 'http' | 'https',
          host: parts[0],
          port,
          // Fix: the documented "host:port:user:pass" credentials were being
          // discarded; keep them so checkProxy can build an authenticated URL.
          ...(parts.length >= 4 ? { username: parts[2], password: parts[3] } : {}),
        };
        // Reject unparseable and out-of-range ports, not just NaN.
        if (proxy.host && Number.isInteger(port) && port > 0 && port <= 65535) {
          allProxies.push(proxy);
        }
      }
    }
    ctx.logger.info(`Parsed ${allProxies.length} proxies from ${source.url}`);
  } catch (error) {
    ctx.logger.error(`Error fetching proxies from ${source.url}`, error);
    return [];
  }
  return allProxies;
}
// Utility functions
/**
 * Normalize a raw proxy line: strip a protocol prefix and remove leading
 * zeros from the host's first octet and from the port.
 */
function cleanProxyUrl(url: string): string {
  return url
    .replace(/^https?:\/\//, '')
    // Fix: only strip leading zeros when another digit follows, so hosts such
    // as "0.0.0.0" stay intact — the old /^0+/ turned them into ".0.0.0".
    .replace(/^0+(\d)/, '$1')
    .replace(/:0+(\d)/g, ':$1');
}
/**
 * Drop duplicate proxies, keeping the first occurrence of each
 * protocol://host:port combination (insertion order preserved).
 */
function removeDuplicateProxies(proxies: ProxyInfo[]): ProxyInfo[] {
  const byKey = new Map<string, ProxyInfo>();
  for (const proxy of proxies) {
    const id = `${proxy.protocol}://${proxy.host}:${proxy.port}`;
    if (!byKey.has(id)) {
      byKey.set(id, proxy);
    }
  }
  return [...byKey.values()];
}

View file

@ -0,0 +1,81 @@
/**
* Proxy Query Operations - Getting active proxies from cache
*/
import { OperationContext } from '@stock-bot/di';
import type { ProxyInfo } from '@stock-bot/proxy';
import { PROXY_CONFIG } from '../shared/config';
/**
 * Get a random active proxy from the cache
 * @param protocol - Optional protocol filter ('http' | 'https' | 'socks4' | 'socks5')
 * @param minSuccessRate - Minimum success rate percentage (default: 50)
 * @returns A random working proxy or null if none found
 */
export async function getRandomActiveProxy(
  protocol?: 'http' | 'https' | 'socks4' | 'socks5',
  minSuccessRate: number = 50
): Promise<ProxyInfo | null> {
  const ctx = OperationContext.create('proxy', 'get-random');
  try {
    // Get all active proxy keys from cache
    const pattern = protocol
      ? `${PROXY_CONFIG.CACHE_KEY}:${protocol}://*`
      : `${PROXY_CONFIG.CACHE_KEY}:*`;
    const keys = await ctx.cache.keys(pattern);
    if (keys.length === 0) {
      ctx.logger.debug('No active proxies found in cache', { pattern });
      return null;
    }
    // Unbiased Fisher–Yates shuffle on a copy. The previous
    // keys.sort(() => Math.random() - 0.5) is statistically biased, relies on
    // implementation-defined behaviour for inconsistent comparators, and
    // mutated the keys array in place.
    const shuffledKeys = [...keys];
    for (let i = shuffledKeys.length - 1; i > 0; i--) {
      const j = Math.floor(Math.random() * (i + 1));
      [shuffledKeys[i], shuffledKeys[j]] = [shuffledKeys[j], shuffledKeys[i]];
    }
    // Find a working proxy that meets the criteria
    for (const key of shuffledKeys) {
      try {
        const proxyData: ProxyInfo | null = await ctx.cache.get(key);
        if (
          proxyData &&
          proxyData.isWorking &&
          (!proxyData.successRate || proxyData.successRate >= minSuccessRate)
        ) {
          ctx.logger.debug('Random active proxy selected', {
            proxy: `${proxyData.host}:${proxyData.port}`,
            protocol: proxyData.protocol,
            successRate: proxyData.successRate?.toFixed(1) + '%',
            avgResponseTime: proxyData.averageResponseTime
              ? `${proxyData.averageResponseTime.toFixed(0)}ms`
              : 'N/A',
          });
          return proxyData;
        }
      } catch (error) {
        ctx.logger.debug('Error reading proxy from cache', {
          key,
          error: (error as Error).message,
        });
        continue;
      }
    }
    ctx.logger.debug('No working proxies found meeting criteria', {
      protocol,
      minSuccessRate,
      keysChecked: shuffledKeys.length,
    });
    return null;
  } catch (error) {
    ctx.logger.error('Error getting random active proxy', {
      error: error instanceof Error ? error.message : String(error),
      protocol,
      minSuccessRate,
    });
    return null;
  }
}

View file

@ -0,0 +1,48 @@
/**
* Proxy Queue Operations - Queueing proxy operations
*/
import { OperationContext } from '@stock-bot/di';
import type { ProxyInfo } from '@stock-bot/proxy';
import type { IServiceContainer } from '@stock-bot/handlers';
/**
 * Enqueue a 'fetch-and-check' job on the proxy queue.
 *
 * @returns the queued job id (or 'unknown' when the queue omits one).
 * @throws when the queue manager is unavailable.
 */
export async function queueProxyFetch(container: IServiceContainer): Promise<string> {
  const ctx = OperationContext.create('proxy', 'queue-fetch');
  const { queue: queueManager } = container;
  if (!queueManager) {
    throw new Error('Queue manager not available');
  }
  const proxyQueue = queueManager.getQueue('proxy');
  const job = await proxyQueue.add('proxy-fetch', {
    handler: 'proxy',
    operation: 'fetch-and-check',
    payload: {},
    priority: 5,
  });
  const jobId = job.id || 'unknown';
  ctx.logger.info('Proxy fetch job queued', { jobId });
  return jobId;
}
/**
 * Enqueue a 'check-specific' job carrying the given proxies as payload.
 *
 * @returns the queued job id (or 'unknown' when the queue omits one).
 * @throws when the queue manager is unavailable.
 */
export async function queueProxyCheck(proxies: ProxyInfo[], container: IServiceContainer): Promise<string> {
  const ctx = OperationContext.create('proxy', 'queue-check');
  const { queue: queueManager } = container;
  if (!queueManager) {
    throw new Error('Queue manager not available');
  }
  const proxyQueue = queueManager.getQueue('proxy');
  const job = await proxyQueue.add('proxy-check', {
    handler: 'proxy',
    operation: 'check-specific',
    payload: { proxies },
    priority: 3,
  });
  const jobId = job.id || 'unknown';
  ctx.logger.info('Proxy check job queued', { jobId, count: proxies.length });
  return jobId;
}

View file

@ -0,0 +1,86 @@
import {
BaseHandler,
Handler,
Operation,
ScheduledOperation,
type IServiceContainer,
} from '@stock-bot/handlers';
import type { ProxyInfo } from '@stock-bot/proxy';
import { processItems } from '@stock-bot/queue';
import { fetchProxiesFromSources } from './operations/fetch.operations';
import { checkProxy } from './operations/check.operations';
/**
 * Proxy handler: scrapes public proxy lists and validates each proxy via
 * batched 'check-proxy' queue jobs.
 */
@Handler('proxy')
export class ProxyHandler extends BaseHandler {
  constructor(services: IServiceContainer) {
    super(services);
  }
  /**
   * Fetch proxies from all configured sources, then fan them out as batched
   * 'check-proxy' jobs on the proxy queue. Also runs weekly (midnight Sunday)
   * via the scheduler.
   *
   * @returns counts of proxies processed and jobs/batches created, plus the
   *          queueing mode reported by processItems.
   */
  @Operation('fetch-from-sources')
  @ScheduledOperation('proxy-fetch-and-check', '0 0 * * 0', {
    priority: 0,
    description: 'Fetch and validate proxy list from sources',
    // immediately: true, // Don't run immediately during startup to avoid conflicts
  })
  async fetchFromSources(): Promise<{
    processed: number;
    jobsCreated: number;
    batchesCreated?: number;
    mode: string;
  }> {
    // Fetch proxies from all configured sources
    this.logger.info('Processing fetch proxies from sources request');
    const proxies = await fetchProxiesFromSources();
    this.logger.info('Fetched proxies from sources', { count: proxies.length });
    if (proxies.length === 0) {
      this.logger.warn('No proxies fetched from sources');
      return { processed: 0, jobsCreated: 0, mode: 'direct' };
    }
    // Get QueueManager from service container
    const queueManager = this.queue;
    if (!queueManager) {
      throw new Error('Queue manager not available');
    }
    // Batch process the proxies through check-proxy operation
    const batchResult = await processItems(proxies, 'proxy', {
      handler: 'proxy',
      operation: 'check-proxy',
      totalDelayHours: 0.083, // 5 minutes (5/60 hours)
      batchSize: 50, // Process 50 proxies per batch
      priority: 3,
      useBatching: true,
      retries: 1,
      ttl: 30000, // 30 second timeout per proxy check
      removeOnComplete: 5,
      removeOnFail: 3,
    }, queueManager);
    this.logger.info('Batch proxy validation completed', {
      totalProxies: proxies.length,
      jobsCreated: batchResult.jobsCreated,
      mode: batchResult.mode,
      batchesCreated: batchResult.batchesCreated,
      duration: `${batchResult.duration}ms`,
    });
    return {
      processed: proxies.length,
      jobsCreated: batchResult.jobsCreated,
      batchesCreated: batchResult.batchesCreated,
      mode: batchResult.mode,
    };
  }
  /**
   * Validate a single proxy; the job payload is the raw ProxyInfo to test.
   */
  @Operation('check-proxy')
  async checkProxyOperation(payload: ProxyInfo): Promise<unknown> {
    // payload is now the raw proxy info object
    this.logger.debug('Processing proxy check request', {
      proxy: `${payload.host}:${payload.port}`,
    });
    return checkProxy(payload);
  }
}

View file

@ -0,0 +1,140 @@
/**
* Proxy Configuration Constants
*/
export const PROXY_CONFIG = {
  CACHE_KEY: 'active', // prefix for per-proxy cache entries
  CACHE_STATS_KEY: 'stats',
  CACHE_TTL: 86400, // 24 hours
  CHECK_TIMEOUT: 7000, // ms per proxy check request
  // SECURITY NOTE(review): CHECK_IP is a hard-coded home/egress IP and
  // CHECK_URL embeds an API key in source — move both to configuration /
  // environment and rotate the key before sharing this repository.
  CHECK_IP: '99.246.102.205',
  CHECK_URL: 'https://proxy-detection.stare.gg/?api_key=bd406bf53ddc6abe1d9de5907830a955',
  // Public proxy lists scraped by fetchProxiesFromSource; `protocol` is the
  // protocol assumed for every entry in that list.
  PROXY_SOURCES: [
    {
      id: 'prxchk',
      url: 'https://raw.githubusercontent.com/prxchk/proxy-list/main/http.txt',
      protocol: 'http',
    },
    {
      id: 'casals',
      url: 'https://raw.githubusercontent.com/casals-ar/proxy-list/main/http',
      protocol: 'http',
    },
    {
      id: 'sunny9577',
      url: 'https://raw.githubusercontent.com/sunny9577/proxy-scraper/master/proxies.txt',
      protocol: 'http',
    },
    {
      id: 'themiralay',
      url: 'https://raw.githubusercontent.com/themiralay/Proxy-List-World/refs/heads/master/data.txt',
      protocol: 'http',
    },
    {
      id: 'casa-ls',
      url: 'https://raw.githubusercontent.com/casa-ls/proxy-list/refs/heads/main/http',
      protocol: 'http',
    },
    {
      id: 'databay',
      url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/http.txt',
      protocol: 'http',
    },
    {
      id: 'speedx',
      url: 'https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt',
      protocol: 'http',
    },
    {
      id: 'monosans',
      url: 'https://raw.githubusercontent.com/monosans/proxy-list/main/proxies/http.txt',
      protocol: 'http',
    },
    {
      id: 'murong',
      url: 'https://raw.githubusercontent.com/MuRongPIG/Proxy-Master/main/http.txt',
      protocol: 'http',
    },
    {
      id: 'vakhov-fresh',
      url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/master/http.txt',
      protocol: 'http',
    },
    {
      id: 'kangproxy',
      url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/http/http.txt',
      protocol: 'http',
    },
    {
      id: 'gfpcom',
      url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/http.txt',
      protocol: 'http',
    },
    {
      id: 'dpangestuw',
      url: 'https://raw.githubusercontent.com/dpangestuw/Free-Proxy/refs/heads/main/http_proxies.txt',
      protocol: 'http',
    },
    {
      id: 'gitrecon',
      url: 'https://raw.githubusercontent.com/gitrecon1455/fresh-proxy-list/refs/heads/main/proxylist.txt',
      protocol: 'http',
    },
    {
      id: 'vakhov-master',
      url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/http.txt',
      protocol: 'http',
    },
    {
      id: 'breaking-tech',
      url: 'https://raw.githubusercontent.com/BreakingTechFr/Proxy_Free/refs/heads/main/proxies/http.txt',
      protocol: 'http',
    },
    {
      id: 'ercindedeoglu',
      url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/http.txt',
      protocol: 'http',
    },
    {
      id: 'tuanminpay',
      url: 'https://raw.githubusercontent.com/TuanMinPay/live-proxy/master/http.txt',
      protocol: 'http',
    },
    {
      id: 'r00tee-https',
      url: 'https://raw.githubusercontent.com/r00tee/Proxy-List/refs/heads/main/Https.txt',
      protocol: 'https',
    },
    {
      id: 'ercindedeoglu-https',
      url: 'https://raw.githubusercontent.com/ErcinDedeoglu/proxies/main/proxies/https.txt',
      protocol: 'https',
    },
    {
      id: 'vakhov-fresh-https',
      url: 'https://raw.githubusercontent.com/vakhov/fresh-proxy-list/refs/heads/master/https.txt',
      protocol: 'https',
    },
    {
      id: 'databay-https',
      url: 'https://raw.githubusercontent.com/databay-labs/free-proxy-list/refs/heads/master/https.txt',
      protocol: 'https',
    },
    {
      id: 'kangproxy-https',
      url: 'https://raw.githubusercontent.com/officialputuid/KangProxy/refs/heads/KangProxy/https/https.txt',
      protocol: 'https',
    },
    {
      id: 'zloi-user-https',
      url: 'https://raw.githubusercontent.com/zloi-user/hideip.me/refs/heads/master/https.txt',
      protocol: 'https',
    },
    {
      id: 'gfpcom-https',
      url: 'https://raw.githubusercontent.com/gfpcom/free-proxy-list/refs/heads/main/list/https.txt',
      protocol: 'https',
    },
  ],
};

View file

@ -0,0 +1,13 @@
/**
* Proxy Shared Types
*/
/**
 * A remote proxy list to scrape (see PROXY_CONFIG.PROXY_SOURCES).
 */
export interface ProxySource {
  id: string; // short unique identifier for the source
  url: string; // raw list URL, one proxy per line
  protocol: string; // protocol assumed for every proxy in the list
  working?: number; // Optional, used for stats
  total?: number; // Optional, used for stats
  percentWorking?: number; // Optional, used for stats
  lastChecked?: Date; // Optional, used for stats
}

View file

@ -0,0 +1,19 @@
/**
* QM Exchanges Operations - Simple exchange data fetching
*/
import type { IServiceContainer } from '@stock-bot/handlers';
/**
 * List all QM exchanges stored in MongoDB.
 */
export async function fetchExchanges(services: IServiceContainer): Promise<any[]> {
  // Get exchanges from MongoDB
  return services.mongodb.collection('qm_exchanges').find({}).toArray();
}
/**
 * Look up a single QM exchange by its code.
 */
export async function getExchangeByCode(services: IServiceContainer, code: string): Promise<any> {
  // Get specific exchange by code
  return services.mongodb.collection('qm_exchanges').findOne({ code });
}

View file

@ -0,0 +1,72 @@
/**
* QM Session Actions - Session management and creation
*/
import { BaseHandler } from '@stock-bot/core/handlers';
import { QM_SESSION_IDS, SESSION_CONFIG } from '../shared/config';
import { QMSessionManager } from '../shared/session-manager';
/**
 * Check existing sessions and queue creation jobs for needed sessions.
 *
 * @returns counts of cleaned-up sessions and queued creation jobs.
 */
export async function checkSessions(handler: BaseHandler): Promise<{
  cleaned: number;
  queued: number;
  message: string;
}> {
  const sessionManager = QMSessionManager.getInstance();
  // Drop failed sessions first so the capacity check below is accurate.
  const cleanedCount = sessionManager.cleanupFailedSessions();
  // Check which session IDs need more sessions and queue creation jobs
  let queuedCount = 0;
  for (const [sessionType, sessionId] of Object.entries(QM_SESSION_IDS)) {
    // Use the structured handler logger instead of console.log, matching the
    // logging style used everywhere else in this module.
    handler.logger.debug(`Checking session ID: ${sessionId}`);
    if (sessionManager.needsMoreSessions(sessionId)) {
      const currentCount = sessionManager.getSessions(sessionId).length;
      const neededSessions = SESSION_CONFIG.MAX_SESSIONS - currentCount;
      // Queue one create-session job per missing slot.
      for (let i = 0; i < neededSessions; i++) {
        await handler.scheduleOperation('create-session', { sessionId, sessionType });
        handler.logger.info(`Queued job to create session for ${sessionType}`);
        queuedCount++;
      }
    }
  }
  return {
    cleaned: cleanedCount,
    queued: queuedCount,
    message: `Session check completed: cleaned ${cleanedCount}, queued ${queuedCount}`,
  };
}
/**
 * Create a single session for a specific session ID.
 *
 * NOTE(review): session construction is still stubbed out below; currently
 * this only acquires a proxy and reports the session as created.
 */
export async function createSingleSession(
  handler: BaseHandler,
  input: any
): Promise<{ sessionId: string; status: string; sessionType: string }> {
  const { sessionId, sessionType } = input || {};
  const sessionManager = QMSessionManager.getInstance();
  // Get proxy from proxy service
  const proxyString = handler.proxy.getProxy();
  // const session = {
  //   proxy: proxyString || 'http://proxy:8080',
  //   headers: sessionManager.getQmHeaders(),
  //   successfulCalls: 0,
  //   failedCalls: 0,
  //   lastUsed: new Date()
  // };
  handler.logger.info(`Creating session for ${sessionType}`);
  // Add session to manager
  // sessionManager.addSession(sessionType, session);
  return {
    // Fix: return the actual session ID from the job payload — the previous
    // code echoed sessionType under the sessionId key, losing the ID the
    // caller queued the job with.
    sessionId,
    status: 'created',
    sessionType,
  };
}

View file

@ -0,0 +1,33 @@
/**
* QM Spider Operations - Simple symbol discovery
*/
import type { IServiceContainer } from '@stock-bot/handlers';
import type { SymbolSpiderJob } from '../shared/types';
/**
 * Spider the QM symbol search space.
 * TODO: Implement actual API calls to discover symbols — this is currently a
 * mock that reports a random number of "found" symbols.
 */
export async function spiderSymbolSearch(
  services: IServiceContainer,
  config: SymbolSpiderJob
): Promise<{ foundSymbols: number; depth: number }> {
  // Mock result: pretend we discovered between 1 and 10 symbols.
  const discovered = 1 + Math.floor(Math.random() * 10);
  return {
    foundSymbols: discovered,
    depth: config.depth,
  };
}
/**
 * Queue a discovery marker for each search term.
 * TODO: replace the one-hour cache markers with real discovery jobs.
 */
export async function queueSymbolDiscovery(
  services: IServiceContainer,
  searchTerms: string[]
): Promise<void> {
  for (const searchTerm of searchTerms) {
    // Mark the term as queued for one hour (3600 s).
    await services.cache.set(`discovery:${searchTerm}`, { queued: true }, 3600);
  }
}

View file

@ -0,0 +1,19 @@
/**
* QM Symbols Operations - Simple symbol fetching
*/
import type { IServiceContainer } from '@stock-bot/handlers';
/**
 * Return up to 50 symbols from the qm_symbols collection.
 */
export async function searchSymbols(services: IServiceContainer): Promise<any[]> {
  // Get symbols from MongoDB
  return services.mongodb.collection('qm_symbols').find({}).limit(50).toArray();
}
/**
 * Fetch the stored document for one symbol (null when unknown).
 */
export async function fetchSymbolData(services: IServiceContainer, symbol: string): Promise<any> {
  // Fetch data for a specific symbol
  return services.mongodb.collection('qm_symbols').findOne({ symbol });
}

View file

@ -0,0 +1,103 @@
import { BaseHandler, Handler, type IServiceContainer } from '@stock-bot/handlers';
@Handler('qm')
export class QMHandler extends BaseHandler {
  // Handler for QuoteMedia (QM) data operations.
  //
  // NOTE(review): every operation below is currently disabled (commented out)
  // pending migration to the new DI pattern; as written the handler only
  // registers itself under the 'qm' name and inherits BaseHandler behavior.
  constructor(services: IServiceContainer) {
    super(services); // Handler name read from @Handler decorator
  }
  // ---- Disabled operations kept for reference during the DI migration ----
  // @Operation('check-sessions')
  // @QueueSchedule('0 */15 * * *', {
  //   priority: 7,
  //   immediately: true,
  //   description: 'Check and maintain QM sessions'
  // })
  // async checkSessions(input: unknown, context: ExecutionContext): Promise<unknown> {
  //   // Call the session maintenance action
  //   const { checkSessions } = await import('./actions/session.action');
  //   return await checkSessions(this);
  // }
  // @Operation('create-session')
  // async createSession(input: unknown, context: ExecutionContext): Promise<unknown> {
  //   // Call the individual session creation action
  //   const { createSingleSession } = await import('./actions/session.action');
  //   return await createSingleSession(this, input);
  // }
  // @Operation('search-symbols')
  // async searchSymbols(_input: unknown, _context: ExecutionContext): Promise<unknown> {
  //   this.logger.info('Searching QM symbols with new DI pattern...');
  //   try {
  //     // Check existing symbols in MongoDB
  //     const symbolsCollection = this.mongodb.collection('qm_symbols');
  //     const symbols = await symbolsCollection.find({}).limit(100).toArray();
  //     this.logger.info('QM symbol search completed', { count: symbols.length });
  //     if (symbols && symbols.length > 0) {
  //       // Cache result for performance
  //       await this.cache.set('qm-symbols-sample', symbols.slice(0, 10), 1800);
  //       return {
  //         success: true,
  //         message: 'QM symbol search completed successfully',
  //         count: symbols.length,
  //         symbols: symbols.slice(0, 10), // Return first 10 symbols as sample
  //       };
  //     } else {
  //       // No symbols found - this is expected initially
  //       this.logger.info('No QM symbols found in database yet');
  //       return {
  //         success: true,
  //         message: 'No symbols found yet - database is empty',
  //         count: 0,
  //       };
  //     }
  //   } catch (error) {
  //     this.logger.error('Failed to search QM symbols', { error });
  //     throw error;
  //   }
  // }
  // @Operation('spider-symbol-search')
  // @QueueSchedule('0 0 * * 0', {
  //   priority: 10,
  //   immediately: false,
  //   description: 'Comprehensive symbol search using QM API'
  // })
  // async spiderSymbolSearch(payload: SymbolSpiderJob | undefined, context: ExecutionContext): Promise<unknown> {
  //   // Set default payload for scheduled runs
  //   const jobPayload: SymbolSpiderJob = payload || {
  //     prefix: null,
  //     depth: 1,
  //     source: 'qm',
  //     maxDepth: 4
  //   };
  //   this.logger.info('Starting QM spider symbol search', { payload: jobPayload });
  //   // Store spider job info in cache (temporary data)
  //   const spiderJobId = `spider:qm:${Date.now()}:${Math.random().toString(36).substr(2, 9)}`;
  //   const spiderResult = {
  //     payload: jobPayload,
  //     startTime: new Date().toISOString(),
  //     status: 'started',
  //     jobId: spiderJobId
  //   };
  //   // Store in cache with 1 hour TTL (temporary data)
  //   await this.cache.set(spiderJobId, spiderResult, 3600);
  //   this.logger.debug('Spider job stored in cache', { spiderJobId, ttl: 3600 });
  //   // Schedule follow-up processing if needed
  //   await this.scheduleOperation('search-symbols', { source: 'spider', spiderJobId }, { delay: 5000 });
  //   return {
  //     success: true,
  //     message: 'QM spider search initiated',
  //     spiderJobId
  //   };
  // }
}

View file

@ -0,0 +1,38 @@
/**
 * Shared configuration for QM operations.
 */
// QM Session IDs for different endpoints. Only the lookup ID is active;
// the commented entries below are a ledger of other known session IDs and
// the QM endpoints they were observed to serve — kept for reference.
export const QM_SESSION_IDS = {
  LOOKUP: 'dc8c9930437f65d30f6597768800957017bac203a0a50342932757c8dfa158d6', // lookup endpoint
  // '5ad521e05faf5778d567f6d0012ec34d6cdbaeb2462f41568f66558bc7b4ced9': [], //4488d072b
  // cc1cbdaf040f76db8f4c94f7d156b9b9b716e1a7509ec9c74a48a47f6b6b9f87: [], //97ff00cf3 // getQuotes
  // '74963ff42f1db2320d051762b5d3950ff9eab23f9d5c5b592551b4ca0441d086': [], //32ca24e394b // getSplitsBySymbol getBrokerRatingsBySymbol getDividendsBySymbol getEarningsSurprisesBySymbol getEarningsEventsBySymbol
  // '1e1d7cb1de1fd2fe52684abdea41a446919a5fe12776dfab88615ac1ce1ec2f6': [], //fb5721812d2c // getEnhancedQuotes getProfiles
  // a900a06cc6b3e8036afb9eeb1bbf9783f0007698ed8f5cb1e373dc790e7be2e5: [], //cc882cd95f9 // getEnhancedQuotes
  // a863d519e38f80e45d10e280fb1afc729816e23f0218db2f3e8b23005a9ad8dd: [], //05a09a41225 // getCompanyFilings getEnhancedQuotes
  // b3cdb1873f3682c5aeeac097be6181529bfb755945e5a412a24f4b9316291427: [], //6a63f56a6 // getHeadlinesTickerStory
  // '97b24911d7b034620aafad9441afdb2bc906ee5c992d86933c5903254ca29709': [], //c56424868d // detailed-quotes
  // '8a394f09cb8540c8be8988780660a7ae5b583c331a1f6cb12834f051a0169a8f': [], //2a86d214e50e5 // getGlobalIndustrySectorPeers getKeyRatiosBySymbol getGlobalIndustrySectorCodeList
  // '2f059f75e2a839437095c9e7e4991d2365bafa7bbb086672a87ae0cf8d92eb01': [], // 48fa36d // getNethouseBySymbol
  // d7ae7e0091dd1d7011948c3dc4af09b5ec552285d92bb188be2618968bc78e3f: [], // 63548ee //getRecentTradesBySymbol getQuotes getLevel2Quote getRecentTradesBySymbol
  // d22d1db8f67fe6e420b4028e5129b289ca64862aa6cee8459193747b68c01de3: [], // 84e9e
  // '6e0b22a7cbc02ac3fa07d45e2880b7696aaebeb29574dce81789e570570c9002': [], //
  // Add other session IDs as needed
} as const;
// QM API Configuration — base endpoints used for authentication and lookup.
export const QM_CONFIG = {
  BASE_URL: 'https://app.quotemedia.com',
  AUTH_PATH: '/auth/g/authenticate/dataTool/v0/500',
  LOOKUP_URL: 'https://app.quotemedia.com/datatool/lookup.json',
} as const;
// Session management settings (pool sizing, failure limit, timeouts in ms).
export const SESSION_CONFIG = {
  MIN_SESSIONS: 5, // below this many valid sessions, more are created
  MAX_SESSIONS: 10, // hard cap per session ID
  MAX_FAILED_CALLS: 10, // a session above this failure count is discarded
  SESSION_TIMEOUT: 10000, // 10 seconds
  API_TIMEOUT: 15000, // 15 seconds
} as const;

View file

@ -0,0 +1,156 @@
/**
* QM Session Manager - Centralized session state management
*/
import { getRandomUserAgent } from '@stock-bot/utils';
import { QM_SESSION_IDS, SESSION_CONFIG } from './config';
import type { QMSession } from './types';
/**
 * QMSessionManager — process-wide singleton that tracks pooled QM API
 * sessions, keyed by QM session ID.
 *
 * A session is "valid" while its failedCalls count stays at or below
 * SESSION_CONFIG.MAX_FAILED_CALLS. That single rule previously appeared as
 * five copy-pasted filter predicates; it now lives in one private
 * isValidSession() so lookup, cleanup, capacity checks and stats cannot
 * drift apart.
 */
export class QMSessionManager {
  private static instance: QMSessionManager | null = null;
  private sessionCache: Record<string, QMSession[]> = {};
  private isInitialized = false;

  private constructor() {
    // Pre-create an empty pool for every known session ID so lookups
    // never hit an undefined bucket.
    Object.values(QM_SESSION_IDS).forEach(sessionId => {
      this.sessionCache[sessionId] = [];
    });
  }

  /** Lazily creates and returns the singleton instance. */
  static getInstance(): QMSessionManager {
    if (!QMSessionManager.instance) {
      QMSessionManager.instance = new QMSessionManager();
    }
    return QMSessionManager.instance;
  }

  /** Single source of truth for session validity. */
  private isValidSession(session: QMSession): boolean {
    return session.failedCalls <= SESSION_CONFIG.MAX_FAILED_CALLS;
  }

  /**
   * Get a random valid session for the given session ID.
   * @returns a session, or null when the pool is empty or fully failed
   */
  getSession(sessionId: string): QMSession | null {
    const sessions = this.sessionCache[sessionId];
    if (!sessions || sessions.length === 0) {
      return null;
    }
    // Filter out sessions with excessive failures
    const validSessions = sessions.filter(session => this.isValidSession(session));
    if (validSessions.length === 0) {
      return null;
    }
    // Random pick spreads load across the pool.
    return validSessions[Math.floor(Math.random() * validSessions.length)];
  }

  /** Add a session to the pool (creating the pool if needed). */
  addSession(sessionId: string, session: QMSession): void {
    if (!this.sessionCache[sessionId]) {
      this.sessionCache[sessionId] = [];
    }
    this.sessionCache[sessionId].push(session);
  }

  /** Get all sessions (valid or not) for a session ID. */
  getSessions(sessionId: string): QMSession[] {
    return this.sessionCache[sessionId] || [];
  }

  /** Total session count across all session IDs. */
  getSessionCount(): number {
    return Object.values(this.sessionCache).reduce((total, sessions) => total + sessions.length, 0);
  }

  /**
   * Drop sessions that exceeded the failure limit.
   * @returns number of sessions removed
   */
  cleanupFailedSessions(): number {
    let removedCount = 0;
    Object.keys(this.sessionCache).forEach(sessionId => {
      const initialCount = this.sessionCache[sessionId].length;
      this.sessionCache[sessionId] = this.sessionCache[sessionId].filter(session =>
        this.isValidSession(session)
      );
      removedCount += initialCount - this.sessionCache[sessionId].length;
    });
    return removedCount;
  }

  /** Browser-like request headers (with a rotating User-Agent) for QM calls. */
  getQmHeaders(): Record<string, string> {
    return {
      'User-Agent': getRandomUserAgent(),
      Accept: '*/*',
      'Accept-Language': 'en',
      'Sec-Fetch-Mode': 'cors',
      Origin: 'https://www.quotemedia.com',
      Referer: 'https://www.quotemedia.com/',
    };
  }

  /** True when the valid-session count is below SESSION_CONFIG.MIN_SESSIONS. */
  needsMoreSessions(sessionId: string): boolean {
    const sessions = this.sessionCache[sessionId] || [];
    const validCount = sessions.filter(session => this.isValidSession(session)).length;
    return validCount < SESSION_CONFIG.MIN_SESSIONS;
  }

  /** True when the pool (valid + failed) reached SESSION_CONFIG.MAX_SESSIONS. */
  isAtCapacity(sessionId: string): boolean {
    const sessions = this.sessionCache[sessionId] || [];
    return sessions.length >= SESSION_CONFIG.MAX_SESSIONS;
  }

  /** Per-session-ID counts of total/valid/failed sessions (total = valid + failed). */
  getStats() {
    const stats: Record<string, { total: number; valid: number; failed: number }> = {};
    Object.entries(this.sessionCache).forEach(([sessionId, sessions]) => {
      const valid = sessions.filter(session => this.isValidSession(session)).length;
      stats[sessionId] = {
        total: sessions.length,
        valid,
        failed: sessions.length - valid,
      };
    });
    return stats;
  }

  /** Mark manager as initialized. */
  setInitialized(initialized: boolean = true): void {
    this.isInitialized = initialized;
  }

  /** Check if manager is initialized. */
  getInitialized(): boolean {
    return this.isInitialized;
  }
}

View file

@ -0,0 +1,32 @@
/**
 * Shared types for QM operations.
 */
// One authenticated QM API session, routed through a specific proxy.
export interface QMSession {
  proxy: string; // proxy endpoint this session is bound to
  headers: Record<string, string>; // request headers captured at creation
  successfulCalls: number; // lifetime success counter
  failedCalls: number; // lifetime failure counter; drives validity checks
  lastUsed: Date; // timestamp of the most recent use
}
// Unit of work for the recursive prefix-spider symbol search.
export interface SymbolSpiderJob {
  prefix: string | null; // null = root job (A-Z)
  depth: number; // 1=A, 2=AA, 3=AAA, etc.
  source: string; // 'qm'
  maxDepth?: number; // optional max depth limit
}
// Normalized exchange record as stored by ingestion.
export interface Exchange {
  exchange: string;
  exchangeCode: string;
  exchangeShortName: string;
  countryCode: string;
  source: string; // provider the record came from
}
// Outcome summary of one spider run.
export interface SpiderResult {
  success: boolean;
  symbolsFound: number;
  jobsCreated: number; // follow-up jobs enqueued for deeper prefixes
}

View file

@ -0,0 +1,102 @@
/**
* WebShare Fetch Operations - API integration
*/
import { OperationContext } from '@stock-bot/di';
import type { ProxyInfo } from '@stock-bot/proxy';
import { WEBSHARE_CONFIG } from '../shared/config';
/**
 * Fetch proxies from WebShare API and convert to ProxyInfo format.
 *
 * Never throws: every failure path (missing config, HTTP error, malformed
 * payload) is logged and collapses to an empty array, so callers can treat
 * the result as "proxies available right now".
 *
 * Security: the WebShare API key must never reach the logs — earlier
 * revisions logged the whole webshare config object and a key prefix; this
 * version logs only presence/length metadata.
 */
export async function fetchWebShareProxies(): Promise<ProxyInfo[]> {
  const ctx = OperationContext.create('webshare', 'fetch-proxies');
  try {
    // Get configuration from stock config system - ensure it's initialized
    const { getStockConfig, initializeStockConfig } = await import('@stock-bot/stock-config');
    // Try to get existing config, or initialize if needed
    let config;
    try {
      config = getStockConfig();
    } catch (error) {
      // Config not initialized yet, initialize it
      config = initializeStockConfig('dataIngestion');
    }
    const apiKey = config.webshare?.apiKey;
    const apiUrl = config.webshare?.apiUrl;
    // Log only non-secret metadata about the config.
    ctx.logger.debug('WebShare config loaded', {
      hasConfig: !!config,
      hasWebshare: !!config.webshare,
      apiKeyLength: apiKey?.length || 0,
      apiUrl: apiUrl,
      envApiKey: process.env.WEBSHARE_API_KEY ? 'SET' : 'NOT_SET',
    });
    if (!apiKey || !apiUrl) {
      ctx.logger.error('Missing WebShare configuration', {
        hasApiKey: !!apiKey,
        hasApiUrl: !!apiUrl,
      });
      return [];
    }
    ctx.logger.info('Fetching proxies from WebShare API', { apiUrl });
    // assumes apiUrl ends with a trailing slash — TODO confirm against config defaults
    const response = await fetch(
      `${apiUrl}proxy/list/?mode=${WEBSHARE_CONFIG.DEFAULT_MODE}&page=${WEBSHARE_CONFIG.DEFAULT_PAGE}&page_size=${WEBSHARE_CONFIG.DEFAULT_PAGE_SIZE}`,
      {
        method: 'GET',
        headers: {
          Authorization: `Token ${apiKey}`,
          'Content-Type': 'application/json',
        },
        // Abort the request outright if WebShare hangs.
        signal: AbortSignal.timeout(WEBSHARE_CONFIG.TIMEOUT),
      }
    );
    if (!response.ok) {
      ctx.logger.error('WebShare API request failed', {
        status: response.status,
        statusText: response.statusText,
      });
      return [];
    }
    const data = await response.json();
    if (!data.results || !Array.isArray(data.results)) {
      ctx.logger.error('Invalid response format from WebShare API', { data });
      return [];
    }
    // Transform proxy data to ProxyInfo format
    const proxies: ProxyInfo[] = data.results.map(
      (proxy: { username: string; password: string; proxy_address: string; port: number }) => ({
        source: 'webshare',
        protocol: 'http' as const,
        host: proxy.proxy_address,
        port: proxy.port,
        username: proxy.username,
        password: proxy.password,
        isWorking: true, // WebShare provides working proxies
        firstSeen: new Date(),
        lastChecked: new Date(),
      })
    );
    ctx.logger.info('Successfully fetched proxies from WebShare', {
      count: proxies.length,
      total: data.count || proxies.length,
    });
    return proxies;
  } catch (error) {
    ctx.logger.error('Failed to fetch proxies from WebShare', { error });
    return [];
  }
}

View file

@ -0,0 +1,10 @@
/**
 * WebShare API request defaults: pagination, proxy mode, and timeout (ms).
 * `as const` keeps the values readonly/literal, matching the QM config
 * objects elsewhere in this commit.
 */
export const WEBSHARE_CONFIG = {
  DEFAULT_PAGE_SIZE: 100,
  DEFAULT_MODE: 'direct',
  DEFAULT_PAGE: 1,
  TIMEOUT: 10000, // request abort timeout in milliseconds
} as const;

View file

@ -0,0 +1,73 @@
import {
BaseHandler,
Handler,
Operation,
QueueSchedule,
type ExecutionContext,
type IServiceContainer,
} from '@stock-bot/handlers';
@Handler('webshare')
export class WebShareHandler extends BaseHandler {
  constructor(services: IServiceContainer) {
    super(services);
  }

  /**
   * Pull the current proxy list from WebShare and push it into the shared
   * proxy manager. Scheduled every 6 hours (and once immediately at startup).
   *
   * @returns summary: success flag, proxies loaded, and working-proxy count
   * @throws rethrows unexpected errors so the queue can retry the job
   */
  @Operation('fetch-proxies')
  @QueueSchedule('0 */6 * * *', {
    priority: 3,
    immediately: true,
    description: 'Fetch fresh proxies from WebShare API',
  })
  async fetchProxies(_input: unknown, _context: ExecutionContext): Promise<unknown> {
    this.logger.info('Fetching proxies from WebShare API');
    try {
      const { fetchWebShareProxies } = await import('./operations/fetch.operations');
      const proxies = await fetchWebShareProxies();
      // Empty result is a soft failure, not an exception — report and stop.
      if (proxies.length === 0) {
        this.logger.warn('No proxies fetched from WebShare API');
        return {
          success: false,
          proxiesUpdated: 0,
          error: 'No proxies returned from API',
        };
      }
      // Update the centralized proxy manager using the injected service
      if (!this.proxy) {
        this.logger.warn('Proxy manager is not initialized, cannot update proxies');
        return {
          success: false,
          proxiesUpdated: 0,
          error: 'Proxy manager not initialized',
        };
      }
      await this.proxy.updateProxies(proxies);
      // Count working proxies once instead of re-filtering for each use below.
      const workingCount = proxies.filter(p => p.isWorking !== false).length;
      this.logger.info('Updated proxy manager with WebShare proxies', {
        count: proxies.length,
        workingCount,
      });
      // Cache proxy stats for monitoring using handler's cache methods
      await this.cacheSet('proxy-count', proxies.length, 3600);
      await this.cacheSet('working-count', workingCount, 3600);
      await this.cacheSet('last-fetch', new Date().toISOString(), 1800);
      return {
        success: true,
        proxiesUpdated: proxies.length,
        workingProxies: workingCount,
      };
    } catch (error) {
      this.logger.error('Failed to fetch and update proxies', { error });
      throw error;
    }
  }
}

View file

@ -0,0 +1,77 @@
/**
 * Data Ingestion Service
 * Simplified entry point using ServiceApplication framework.
 *
 * This module runs at import time: it loads config, builds the
 * ServiceApplication, and starts the HTTP service.
 */
import { initializeStockConfig } from '@stock-bot/stock-config';
import {
  ServiceApplication,
} from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
// Local imports
import { initializeAllHandlers } from './handlers';
import { createRoutes } from './routes/create-routes';
// Initialize configuration with service-specific overrides
const config = initializeStockConfig('dataIngestion');
// NOTE(review): this prints the ENTIRE config object, which may include
// credentials (e.g. API keys, DB passwords) — consider redacting before ship.
console.log('Data Ingestion Service Configuration:', JSON.stringify(config, null, 2));
// Create service application
const app = new ServiceApplication(
  config,
  {
    serviceName: 'data-ingestion',
    enableHandlers: true,
    enableScheduledJobs: true,
    corsConfig: {
      // Wide-open CORS; acceptable for an internal service, tighten otherwise.
      origin: '*',
      allowMethods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'PATCH'],
      allowHeaders: ['Content-Type', 'Authorization'],
      credentials: false,
    },
    serviceMetadata: {
      version: '1.0.0',
      description: 'Market data ingestion from multiple providers',
      endpoints: {
        health: '/health',
        handlers: '/api/handlers',
      },
    },
  },
  {
    // Lifecycle hooks if needed
    onStarted: (port) => {
      const logger = getLogger('data-ingestion');
      logger.info('Data ingestion service startup initiated with ServiceApplication framework');
    },
  }
);
// Container factory function: builds the DI container with only the
// services this process needs (no QuestDB; browser + proxy for scraping).
async function createContainer(config: any) {
  const { ServiceContainerBuilder } = await import('@stock-bot/di');
  const container = await new ServiceContainerBuilder()
    .withConfig(config)
    .withOptions({
      enableQuestDB: false, // Data ingestion doesn't need QuestDB yet
      enableMongoDB: true,
      enablePostgres: config.database?.postgres?.enabled ?? false,
      enableCache: true,
      enableQueue: true,
      enableBrowser: true, // Data ingestion needs browser for web scraping
      enableProxy: true, // Data ingestion needs proxy for rate limiting
    })
    .build(); // This automatically initializes services
  return container;
}
// Start the service; any startup failure is fatal for the process.
app.start(createContainer, createRoutes, initializeAllHandlers).catch(error => {
  const logger = getLogger('data-ingestion');
  logger.fatal('Failed to start data service', { error });
  process.exit(1);
});

View file

@ -0,0 +1,74 @@
/**
* Routes creation with improved DI pattern
*/
import { Hono } from 'hono';
import type { IServiceContainer } from '@stock-bot/handlers';
import { exchangeRoutes } from './exchange.routes';
import { healthRoutes } from './health.routes';
import { createQueueRoutes } from './queue.routes';
/**
 * Creates all routes with access to type-safe services.
 *
 * The services-injection middleware is registered BEFORE any routes are
 * mounted: Hono applies middleware only to handlers registered after it,
 * so the previous ordering (mount first, `use` after) left the mounted
 * sub-routes without `c.get('services')`.
 */
export function createRoutes(services: IServiceContainer): Hono {
  const app = new Hono();
  // Make the service container available to every handler registered below.
  app.use('*', async (c, next) => {
    c.set('services', services);
    await next();
  });
  // Mount routes that don't need services
  app.route('/health', healthRoutes);
  // Mount routes that need services
  app.route('/api/exchanges', exchangeRoutes);
  app.route('/api/queue', createQueueRoutes(services));
  // Diagnostic endpoint to verify the improved DI wiring end-to-end.
  app.get('/api/di-test', async c => {
    try {
      const services = c.get('services') as IServiceContainer;
      // Test MongoDB connection
      const mongoStats = services.mongodb?.getPoolMetrics?.() || {
        status: services.mongodb ? 'connected' : 'disabled',
      };
      // Test PostgreSQL connection
      const pgConnected = services.postgres?.connected || false;
      // Test cache
      const cacheReady = services.cache?.isReady() || false;
      // Test queue
      const queueStats = services.queue?.getGlobalStats() || { status: 'disabled' };
      return c.json({
        success: true,
        message: 'Improved DI pattern is working!',
        services: {
          mongodb: mongoStats,
          postgres: { connected: pgConnected },
          cache: { ready: cacheReady },
          queue: queueStats,
        },
        timestamp: new Date().toISOString(),
      });
    } catch (error) {
      const services = c.get('services') as IServiceContainer;
      services.logger.error('DI test endpoint failed', { error });
      return c.json(
        {
          success: false,
          error: error instanceof Error ? error.message : String(error),
        },
        500
      );
    }
  });
  return app;
}

View file

@ -0,0 +1,22 @@
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('exchange-routes');

/** Router exposing exchange listing endpoints. */
const exchange = new Hono();

// GET / — list all exchanges (placeholder until DB integration lands).
exchange.get('/', async c => {
  try {
    // TODO: Implement exchange listing from database
    const body = {
      status: 'success',
      data: [] as unknown[],
      message: 'Exchange endpoints will be implemented with database integration',
    };
    return c.json(body);
  } catch (error) {
    logger.error('Failed to get exchanges', { error });
    return c.json({ status: 'error', message: 'Failed to get exchanges' }, 500);
  }
});

export { exchange as exchangeRoutes };

View file

@ -0,0 +1,14 @@
import { Hono } from 'hono';
/** Liveness probe router for the data-ingestion service. */
const health = new Hono();

// GET / — always reports healthy with a fresh timestamp.
health.get('/', c =>
  c.json({
    status: 'healthy',
    service: 'data-ingestion',
    timestamp: new Date().toISOString(),
  })
);

export { health as healthRoutes };

View file

@ -0,0 +1,6 @@
/**
 * Routes index - exports all route modules.
 */
// Static routers (no DI container required).
export { exchangeRoutes } from './exchange.routes';
export { healthRoutes } from './health.routes';
// Legacy queue router built with an empty container; prefer calling
// createQueueRoutes(services) from './queue.routes' for real wiring.
export { queueRoutes } from './queue.routes';

View file

@ -0,0 +1,142 @@
/**
* Market data routes
*/
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import { processItems } from '@stock-bot/queue';
import type { IServiceContainer } from '@stock-bot/handlers';
const logger = getLogger('market-data-routes');

/**
 * Builds the market-data router bound to a service container.
 *
 * All endpoints are asynchronous job submitters: they enqueue work on the
 * BullMQ-style queue manager and return a job ID rather than live data.
 */
export function createMarketDataRoutes(container: IServiceContainer) {
  const marketDataRoutes = new Hono();
  // GET /api/live/:symbol — enqueue a live-data fetch via the Yahoo provider.
  marketDataRoutes.get('/api/live/:symbol', async c => {
    const symbol = c.req.param('symbol');
    logger.info('Live data request', { symbol });
    try {
      // Queue job for live data using Yahoo provider
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ status: 'error', message: 'Queue manager not available' }, 503);
      }
      const queue = queueManager.getQueue('yahoo-finance');
      const job = await queue.add('live-data', {
        handler: 'yahoo-finance',
        operation: 'live-data',
        payload: { symbol },
      });
      return c.json({
        status: 'success',
        message: 'Live data job queued',
        jobId: job.id,
        symbol,
      });
    } catch (error) {
      logger.error('Failed to queue live data job', { symbol, error });
      return c.json({ status: 'error', message: 'Failed to queue live data job' }, 500);
    }
  });
  // GET /api/historical/:symbol?from=&to= — enqueue a historical fetch;
  // the window defaults to the last 30 days when from/to are omitted.
  marketDataRoutes.get('/api/historical/:symbol', async c => {
    const symbol = c.req.param('symbol');
    const from = c.req.query('from');
    const to = c.req.query('to');
    logger.info('Historical data request', { symbol, from, to });
    try {
      const fromDate = from ? new Date(from) : new Date(Date.now() - 30 * 24 * 60 * 60 * 1000); // 30 days ago
      const toDate = to ? new Date(to) : new Date(); // Now
      // Queue job for historical data using Yahoo provider
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ status: 'error', message: 'Queue manager not available' }, 503);
      }
      const queue = queueManager.getQueue('yahoo-finance');
      const job = await queue.add('historical-data', {
        handler: 'yahoo-finance',
        operation: 'historical-data',
        payload: {
          symbol,
          from: fromDate.toISOString(),
          to: toDate.toISOString(),
        },
      });
      return c.json({
        status: 'success',
        message: 'Historical data job queued',
        jobId: job.id,
        symbol,
        from: fromDate,
        to: toDate,
      });
    } catch (error) {
      logger.error('Failed to queue historical data job', { symbol, from, to, error });
      return c.json({ status: 'error', message: 'Failed to queue historical data job' }, 500);
    }
  });
  // POST /api/process-symbols — batch processing endpoint using the new
  // queue system; body selects provider/operation and batching parameters.
  marketDataRoutes.post('/api/process-symbols', async c => {
    try {
      const {
        symbols,
        provider = 'ib',
        operation = 'fetch-session',
        useBatching = true,
        totalDelayHours = 0.0083, // ~30 seconds (30/3600 hours)
        batchSize = 10,
      } = await c.req.json();
      if (!symbols || !Array.isArray(symbols) || symbols.length === 0) {
        return c.json({ status: 'error', message: 'Invalid symbols array' }, 400);
      }
      logger.info('Batch processing symbols', {
        count: symbols.length,
        provider,
        operation,
        useBatching,
      });
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ status: 'error', message: 'Queue manager not available' }, 503);
      }
      const result = await processItems(symbols, provider, {
        handler: provider,
        operation,
        totalDelayHours,
        useBatching,
        batchSize,
        priority: 2,
        retries: 2,
        removeOnComplete: 5,
        removeOnFail: 10,
      }, queueManager);
      return c.json({
        status: 'success',
        message: 'Batch processing initiated',
        result,
        symbols: symbols.length,
      });
    } catch (error) {
      logger.error('Failed to process symbols batch', { error });
      return c.json({ status: 'error', message: 'Failed to process symbols batch' }, 500);
    }
  });
  return marketDataRoutes;
}
// Legacy export for backward compatibility.
// NOTE(review): built with an empty container, so container.queue is
// undefined and every endpoint will answer 503 — prefer the factory above.
export const marketDataRoutes = createMarketDataRoutes({} as IServiceContainer);

View file

@ -0,0 +1,35 @@
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
const logger = getLogger('queue-routes');

/**
 * Builds the queue-monitoring router bound to the given service container.
 */
export function createQueueRoutes(container: IServiceContainer) {
  const queue = new Hono();

  // GET /status — global queue statistics from the queue manager.
  queue.get('/status', async c => {
    const queueManager = container.queue;
    if (!queueManager) {
      return c.json({ status: 'error', message: 'Queue manager not available' }, 503);
    }
    try {
      const globalStats = await queueManager.getGlobalStats();
      return c.json({
        status: 'success',
        data: globalStats,
        message: 'Queue status retrieved successfully',
      });
    } catch (error) {
      logger.error('Failed to get queue status', { error });
      return c.json({ status: 'error', message: 'Failed to get queue status' }, 500);
    }
  });

  return queue;
}

// Legacy export for backward compatibility
export const queueRoutes = createQueueRoutes({} as IServiceContainer);

View file

@ -0,0 +1,40 @@
/**
 * Type definitions for exchange data structures.
 */
// Raw Interactive Brokers exchange record as scraped/stored in MongoDB
// (snake_case fields mirror the upstream payload).
export interface IBExchange {
  id: string;
  country_code: string;
  name: string;
  code?: string;
  exchange_code?: string;
  currency?: string;
  timezone?: string;
  _id?: unknown; // MongoDB ObjectId
  // Add other properties as needed
}
// Normalized master exchange record used by the sync layer.
export interface MasterExchangeData {
  id: string;
  code: string;
  name: string;
  country: string;
  currency: string;
  // Add other properties as needed
}
// QuoteMedia symbol record (exchange/name/type optional in raw data).
export interface QMSymbol {
  symbol: string;
  exchange?: string;
  name?: string;
  type?: string;
  // Add other properties as needed
}
// Interactive Brokers symbol record (exchange is always present).
export interface IBSymbol {
  symbol: string;
  exchange: string;
  name?: string;
  currency?: string;
  // Add other properties as needed
}

View file

@ -0,0 +1,93 @@
/**
 * Type definitions for all job payloads across data service providers.
 */
// Common result types shared by every provider's operations.
export interface JobResult {
  success: boolean;
  message: string;
}
// JobResult variant for operations that report how many items they touched.
export interface CountableJobResult extends JobResult {
  count: number;
}
// QM Provider Types
// Unit of work for the recursive prefix-spider symbol search.
export interface SymbolSpiderJob {
  prefix: string | null; // null = root job (A-Z)
  depth: number; // 1=A, 2=AA, 3=AAA, etc.
  source: string; // 'qm'
  maxDepth?: number; // optional max depth limit
}
// Marker result for session creation (no extra fields beyond JobResult).
export interface CreateSessionsResult extends JobResult {
  // No additional fields needed
}
export interface SearchSymbolsResult extends CountableJobResult {
  symbols?: unknown[]; // First 10 symbols as sample
}
// Outcome of one spider pass, including dedupe/error accounting.
export interface SpiderSymbolSearchResult extends JobResult {
  symbolsFound: number;
  newSymbolsAdded: number;
  duplicatesSkipped: number;
  errors: number;
  depth: number;
  prefix: string | null;
}
// IB Provider Types
export interface FetchSessionResult extends JobResult {
  sessionData?: Record<string, string>;
}
export interface FetchExchangesResult extends CountableJobResult {
  exchanges?: unknown[];
}
export interface FetchSymbolsResult extends CountableJobResult {
  symbols?: unknown[];
}
// Combined result when exchanges and symbols are fetched in one job.
export interface IBExchangesAndSymbolsResult extends JobResult {
  exchanges: FetchExchangesResult;
  symbols: FetchSymbolsResult;
}
// Proxy Provider Types
// Note: a similar ProxyInfo shape also exists in @stock-bot/proxy;
// this local copy is what the job results below reference.
export interface ProxyInfo {
  host: string;
  port: number;
  protocol: 'http' | 'https' | 'socks4' | 'socks5';
  username?: string;
  password?: string;
  country?: string;
  city?: string;
  isValid?: boolean;
  lastChecked?: Date;
}
export interface FetchProxiesFromSourcesResult extends CountableJobResult {
  proxies?: ProxyInfo[];
  sources: string[]; // which upstream proxy lists were consulted
}
export interface CheckProxyResult extends JobResult {
  proxy: ProxyInfo;
  responseTime?: number; // milliseconds, present only on success
  error?: string;
}
// WebShare Provider Types
export interface FetchWebShareProxiesResult extends CountableJobResult {
  proxies?: ProxyInfo[];
  activeProxies: number;
  totalQuota: number;
  remainingQuota: number;
}
// No payload job types (for operations that don't need input)
export interface NoPayload {
  // Empty interface for operations that don't need payload
}

View file

@ -0,0 +1,109 @@
import { sleep } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('symbol-search-util');

/** Async lookup that maps a query prefix to the symbols it matches. */
export interface SearchFunction {
  (query: string): Promise<string[]>;
}

/**
 * Exhaustively discovers symbols by prefix expansion: starts at A-Z and,
 * whenever a prefix returns `threshold`+ results (a sign the upstream API
 * truncated the list), recurses into prefix+A..Z until results fit under
 * the threshold or maxDepth is reached.
 *
 * Error logging now passes structured `{ error }` context, consistent with
 * the logger convention used across the rest of this service (previously
 * the error was passed as a positional argument).
 */
export class SymbolSearchUtil {
  private threshold: number;
  private searchFunction: SearchFunction;
  private maxDepth: number;
  private delay: number;

  /**
   * @param searchFunction prefix lookup to drive
   * @param threshold result count at/above which we assume truncation and recurse
   * @param maxDepth longest prefix length to try
   * @param delay ms pause between calls (simple rate limiting; 0 disables)
   */
  constructor(
    searchFunction: SearchFunction,
    threshold: number = 50,
    maxDepth: number = 4,
    delay: number = 100
  ) {
    this.searchFunction = searchFunction;
    this.threshold = threshold;
    this.maxDepth = maxDepth;
    this.delay = delay;
  }

  /** Runs the full A-Z sweep and returns the deduplicated symbol list. */
  async searchAllSymbols(): Promise<string[]> {
    logger.info('Starting comprehensive symbol search...');
    const allSymbols: string[] = [];
    // Start with single letters A-Z
    for (let i = 0; i < 26; i++) {
      const singleLetter = String.fromCharCode(65 + i);
      try {
        const symbols = await this.searchRecursive(singleLetter, 1);
        allSymbols.push(...symbols);
        // Add delay between top-level searches
        if (this.delay > 0) {
          await sleep(this.delay);
        }
      } catch (error) {
        logger.error(`Failed to search for "${singleLetter}"`, { error });
        // Continue with next letter
      }
    }
    // Remove duplicates
    const uniqueSymbols = [...new Set(allSymbols)];
    logger.info(`Symbol search completed. Found ${uniqueSymbols.length} unique symbols`);
    return uniqueSymbols;
  }

  /** Depth-first expansion of one prefix; returns [] on search failure. */
  private async searchRecursive(prefix: string, depth: number): Promise<string[]> {
    try {
      const symbols = await this.searchFunction(prefix);
      logger.debug(`Query "${prefix}" returned ${symbols.length} symbols`);
      // At max depth, or when results fit under the threshold, keep them as-is.
      if (depth >= this.maxDepth || symbols.length < this.threshold) {
        logger.info(`Added ${symbols.length} symbols from query: ${prefix}`);
        return symbols;
      }
      // Too many results: list is likely truncated upstream, so expand the prefix.
      logger.info(
        `Query "${prefix}" returned ${symbols.length} results (>= ${this.threshold}), going deeper...`
      );
      const allSymbols: string[] = [];
      for (let i = 0; i < 26; i++) {
        const nextQuery = prefix + String.fromCharCode(65 + i);
        try {
          const deeperSymbols = await this.searchRecursive(nextQuery, depth + 1);
          allSymbols.push(...deeperSymbols);
          // Add delay between recursive calls
          if (this.delay > 0 && depth < 3) {
            // Only delay for first few levels
            await sleep(this.delay);
          }
        } catch (error) {
          logger.error(`Failed recursive search for "${nextQuery}"`, { error });
          // Continue with next combination
        }
      }
      return allSymbols;
    } catch (error) {
      logger.error(`Error in recursive search for "${prefix}"`, { error });
      return [];
    }
  }

  /** One-shot helper: builds a util with the given tuning and runs the sweep. */
  static async search(
    searchFunction: SearchFunction,
    threshold: number = 50,
    maxDepth: number = 4,
    delay: number = 100
  ): Promise<string[]> {
    const util = new SymbolSearchUtil(searchFunction, threshold, maxDepth, delay);
    return util.searchAllSymbols();
  }
}

View file

@ -0,0 +1,103 @@
#!/usr/bin/env bun
/**
 * Test script for CEO handler operations.
 *
 * NOTE(review): runs immediately on import and expects live local
 * MongoDB/Postgres/Dragonfly instances (per the repo config); it exits the
 * process with 0 on success and 1 on failure.
 */
import { initializeServiceConfig } from '@stock-bot/config';
import { createServiceContainer, initializeServices } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('test-ceo-operations');
async function testCeoOperations() {
  logger.info('Testing CEO handler operations...');
  try {
    // Initialize config
    const config = initializeServiceConfig();
    // Create Awilix container — map service config onto the DI container's
    // expected shape (Dragonfly settings feed the redis slot; QuestDB off).
    const awilixConfig = {
      redis: {
        host: config.database.dragonfly.host,
        port: config.database.dragonfly.port,
        db: config.database.dragonfly.db,
      },
      mongodb: {
        uri: config.database.mongodb.uri,
        database: config.database.mongodb.database,
      },
      postgres: {
        host: config.database.postgres.host,
        port: config.database.postgres.port,
        database: config.database.postgres.database,
        user: config.database.postgres.user,
        password: config.database.postgres.password,
      },
      questdb: {
        enabled: false,
        host: config.database.questdb.host,
        httpPort: config.database.questdb.httpPort,
        pgPort: config.database.questdb.pgPort,
        influxPort: config.database.questdb.ilpPort,
        database: config.database.questdb.database,
      },
    };
    const container = createServiceContainer(awilixConfig);
    await initializeServices(container);
    const serviceContainer = container.resolve('serviceContainer');
    // Import and create CEO handler
    const { CeoHandler } = await import('./src/handlers/ceo/ceo.handler');
    const ceoHandler = new CeoHandler(serviceContainer);
    // Test 1: Check if there are any CEO symbols in the database
    logger.info('Checking for existing CEO symbols...');
    const collection = serviceContainer.mongodb.collection('ceoSymbols');
    const count = await collection.countDocuments();
    logger.info(`Found ${count} CEO symbols in database`);
    if (count > 0) {
      // Test 2: Run process-unique-symbols operation
      logger.info('Testing process-unique-symbols operation...');
      const result = await ceoHandler.updateUniqueSymbols(undefined, {});
      logger.info('Process unique symbols result:', result);
      // Test 3: Test individual symbol processing on one sample document
      logger.info('Testing process-individual-symbol operation...');
      const sampleSymbol = await collection.findOne({});
      if (sampleSymbol) {
        const individualResult = await ceoHandler.processIndividualSymbol(
          {
            ceoId: sampleSymbol.ceoId,
            symbol: sampleSymbol.symbol,
            exchange: sampleSymbol.exchange,
            name: sampleSymbol.name,
          },
          {}
        );
        logger.info('Process individual symbol result:', individualResult);
      }
    } else {
      logger.warn('No CEO symbols found. Run the service to populate data first.');
    }
    // Clean up: close DB and cache connections so the process can exit cleanly.
    await serviceContainer.mongodb.disconnect();
    await serviceContainer.postgres.disconnect();
    if (serviceContainer.cache) {
      await serviceContainer.cache.disconnect();
    }
    logger.info('Test completed successfully!');
    process.exit(0);
  } catch (error) {
    logger.error('Test failed:', error);
    process.exit(1);
  }
}
// Run the test
testCeoOperations();

View file

@ -0,0 +1,18 @@
{
"extends": "../../tsconfig.app.json",
"references": [
{ "path": "../../libs/core/types" },
{ "path": "../../libs/core/config" },
{ "path": "../../libs/core/logger" },
{ "path": "../../libs/core/di" },
{ "path": "../../libs/core/handlers" },
{ "path": "../../libs/data/cache" },
{ "path": "../../libs/data/mongodb" },
{ "path": "../../libs/data/postgres" },
{ "path": "../../libs/data/questdb" },
{ "path": "../../libs/services/queue" },
{ "path": "../../libs/services/shutdown" },
{ "path": "../../libs/utils" },
{ "path": "../config" }
]
}

View file

@ -0,0 +1,183 @@
# Data Sync Service
The Data Sync Service handles synchronization of raw MongoDB data to PostgreSQL master records, providing a unified data layer for the stock-bot application.
## Features
### Original Sync Manager
- Basic QM (QuoteMedia) symbol and exchange synchronization
- Simple static exchange mapping
- Manual sync triggers via REST API
### Enhanced Sync Manager ✨ NEW
- **Multi-provider support**: Syncs from EOD, Interactive Brokers, and QuoteMedia
- **Comprehensive exchange handling**: Leverages all 4 MongoDB exchange collections
- **Intelligent exchange mapping**: Dynamic mapping with fallback logic
- **Transaction safety**: Full ACID compliance with rollback on errors
- **Performance optimization**: Exchange caching for faster lookups
- **Enhanced error handling**: Detailed error tracking and reporting
## API Endpoints
### Health Check
- `GET /health` - Service health status
### Original Sync Operations
- `POST /sync/symbols` - Sync QM symbols to PostgreSQL
- `POST /sync/exchanges` - Sync QM exchanges to PostgreSQL
- `GET /sync/status` - Get basic sync status
### Enhanced Sync Operations ✨ NEW
- `POST /sync/exchanges/all?clear=true` - Comprehensive exchange sync from all providers (clear=true removes dummy data first)
- `POST /sync/symbols/:provider?clear=true` - Sync symbols from specific provider (qm, eod, ib)
- `POST /sync/clear` - Clear all PostgreSQL data (exchanges, symbols, mappings)
- `GET /sync/status/enhanced` - Get detailed sync status
- `GET /sync/stats/exchanges` - Get exchange statistics
## Data Sources
### MongoDB Collections
1. **exchanges** (34 records) - Unified exchange reference
2. **eodExchanges** (78 records) - EOD provider with currency/MIC data
3. **ibExchanges** (214 records) - Interactive Brokers with asset types
4. **qmExchanges** (25 records) - QuoteMedia exchanges
### PostgreSQL Tables
1. **master_exchanges** - Unified exchange master data
2. **master_symbols** - Symbol master records
3. **provider_symbol_mappings** - Multi-provider symbol mappings
4. **sync_status** - Synchronization tracking
## Key Improvements
### 1. Multi-Provider Exchange Sync
Instead of only syncing QM exchanges, the enhanced sync manager:
- Syncs from EOD exchanges (comprehensive global data with currencies)
- Adds IB exchanges for additional coverage (214 exchanges vs 25 in QM)
### 2. Intelligent Exchange Mapping
Replaces hard-coded mapping with dynamic resolution:
```typescript
// Before: Static mapping
const exchangeMap = { 'NASDAQ': 'NASDAQ', 'NYSE': 'NYSE' };
// After: Dynamic mapping with variations
const codeMap = {
'NASDAQ': 'NASDAQ', 'NAS': 'NASDAQ',
'NYSE': 'NYSE', 'NYQ': 'NYSE',
'LSE': 'LSE', 'LON': 'LSE', 'LN': 'LSE',
'US': 'NYSE' // EOD uses 'US' for US markets
};
```
### 3. Transaction Safety
All sync operations use database transactions:
- `BEGIN` transaction at start
- `COMMIT` on success
- `ROLLBACK` on any error
- Ensures data consistency
### 4. Performance Optimization
- Exchange cache preloaded at startup
- Reduced database queries during symbol processing
- Batch operations where possible
### 5. Enhanced Error Handling
- Detailed error logging with context
- Separate error counting in sync results
- Graceful handling of missing/invalid data
## Usage Examples
### Clear All Data and Start Fresh Exchange Sync
```bash
curl -X POST "http://localhost:3005/sync/exchanges/all?clear=true"
```
### Sync Symbols from Specific Provider
```bash
# Sync QuoteMedia symbols (clear existing symbols first)
curl -X POST "http://localhost:3005/sync/symbols/qm?clear=true"
# Sync EOD symbols
curl -X POST http://localhost:3005/sync/symbols/eod
# Sync Interactive Brokers symbols
curl -X POST http://localhost:3005/sync/symbols/ib
```
### Clear All PostgreSQL Data
```bash
curl -X POST http://localhost:3005/sync/clear
```
### Get Enhanced Status
```bash
curl http://localhost:3005/sync/status/enhanced
curl http://localhost:3005/sync/stats/exchanges
```
## Configuration
### Environment Variables
- `DATA_SYNC_SERVICE_PORT` - Service port (default: 3005)
- `NODE_ENV` - Environment mode
### Database Connections
- **MongoDB**: `mongodb://trading_admin:trading_mongo_dev@localhost:27017/stock?authSource=admin`
- **PostgreSQL**: `postgresql://trading_user:trading_pass_dev@localhost:5432/trading_bot`
## Development
### Build and Run
```bash
# Development mode
bun run dev
# Build
bun run build
# Production
bun run start
```
### Testing
```bash
# Run tests
bun test
# Start infrastructure
bun run infra:up
# Test sync operations
curl -X POST http://localhost:3005/sync/exchanges/all
curl -X POST http://localhost:3005/sync/symbols/qm
```
## Architecture
```
MongoDB Collections PostgreSQL Tables
┌─ exchanges (34) ┐ ┌─ master_exchanges
├─ eodExchanges (78) ├──▶├─ master_symbols
├─ ibExchanges (214) │ ├─ provider_symbol_mappings
└─ qmExchanges (25) ┘ └─ sync_status
Enhanced Sync Manager
- Exchange caching
- Dynamic mapping
- Transaction safety
- Multi-provider support
```
## Migration Path
The enhanced sync manager is designed to work alongside the original sync manager:
1. **Immediate**: Use enhanced exchange sync for better coverage
2. **Phase 1**: Test enhanced symbol sync with each provider
3. **Phase 2**: Replace original sync manager when confident
4. **Phase 3**: Remove original sync manager and endpoints
Both managers can be used simultaneously during the transition period.

View file

@ -0,0 +1,29 @@
{
"name": "@stock-bot/data-pipeline",
"version": "1.0.0",
"description": "Data processing pipeline for syncing and transforming raw data to normalized records",
"main": "dist/index.js",
"type": "module",
"scripts": {
"dev": "bun --watch src/index.ts",
"build": "bun build src/index.ts --outdir dist --target node --external chromium-bidi --external electron --external playwright --external playwright-core",
"start": "bun dist/index.js",
"test": "bun test",
"clean": "rm -rf dist"
},
"dependencies": {
"@stock-bot/cache": "*",
"@stock-bot/config": "*",
"@stock-bot/stock-config": "*",
"@stock-bot/logger": "*",
"@stock-bot/mongodb": "*",
"@stock-bot/postgres": "*",
"@stock-bot/questdb": "*",
"@stock-bot/queue": "*",
"@stock-bot/shutdown": "*",
"hono": "^4.0.0"
},
"devDependencies": {
"typescript": "^5.0.0"
}
}

View file

@ -0,0 +1,34 @@
/**
* Service Container Setup for Data Pipeline
* Configures dependency injection for the data pipeline service
*/
import type { IServiceContainer } from '@stock-bot/handlers';
import { getLogger } from '@stock-bot/logger';
import type { AppConfig } from '@stock-bot/config';
const logger = getLogger('data-pipeline-container');
/**
* Configure the service container for data pipeline workloads
*/
/**
 * Configure the service container for data pipeline workloads.
 *
 * Pipeline workloads run heavier queries and transformations than other
 * services, so production gets roughly double the connection headroom.
 */
export function setupServiceContainer(
  config: AppConfig,
  container: IServiceContainer
): IServiceContainer {
  logger.info('Configuring data pipeline service container...');
  const isProduction = config.environment === 'production';
  const poolSizes = {
    mongodb: isProduction ? 40 : 20,
    postgres: isProduction ? 50 : 25,
    cache: isProduction ? 30 : 15,
  };
  logger.info('Data pipeline pool sizes configured', poolSizes);
  // NOTE(review): poolSizes is only logged here — the container arrives
  // pre-configured and these numbers are not applied anywhere in this function.
  return container;
}

View file

@ -0,0 +1,74 @@
import { getLogger } from '@stock-bot/logger';
import { handlerRegistry, createJobHandler, type HandlerConfig, type ScheduledJobConfig } from '@stock-bot/queue';
import type { IServiceContainer } from '@stock-bot/handlers';
import { exchangeOperations } from './operations';
const logger = getLogger('exchanges-handler');
// Name under which this handler is registered in the queue handler registry.
const HANDLER_NAME = 'exchanges';
// Static configuration for the exchanges queue handler: serial processing
// (concurrency 1), up to 3 attempts per job, plus the recurring sync schedule.
const exchangesHandlerConfig: HandlerConfig = {
  concurrency: 1,
  maxAttempts: 3,
  scheduledJobs: [
    {
      operation: 'sync-all-exchanges',
      cronPattern: '0 0 * * 0', // Weekly on Sunday at midnight
      payload: { clearFirst: true },
      priority: 10,
      immediately: false,
    } as ScheduledJobConfig,
    {
      operation: 'sync-qm-exchanges',
      cronPattern: '0 1 * * *', // Daily at 1 AM
      payload: {},
      priority: 5,
      immediately: false,
    } as ScheduledJobConfig,
    {
      operation: 'sync-ib-exchanges',
      cronPattern: '0 3 * * *', // Daily at 3 AM
      payload: {},
      priority: 3,
      immediately: false,
    } as ScheduledJobConfig,
    {
      // NOTE(review): shares the 3 AM slot with sync-ib-exchanges — presumably
      // intentional (priority 7 vs 3 orders them); confirm.
      operation: 'sync-qm-provider-mappings',
      cronPattern: '0 3 * * *', // Daily at 3 AM
      payload: {},
      priority: 7,
      immediately: false,
    } as ScheduledJobConfig,
  ],
  // Operation name -> raw operation function. initializeExchangesHandler wraps
  // these so each receives the DI container at dispatch time.
  operations: {
    'sync-all-exchanges': exchangeOperations.syncAllExchanges,
    'sync-qm-exchanges': exchangeOperations.syncQMExchanges,
    'sync-ib-exchanges': exchangeOperations.syncIBExchanges,
    'sync-qm-provider-mappings': exchangeOperations.syncQMProviderMappings,
    'clear-postgresql-data': exchangeOperations.clearPostgreSQLData,
    'get-exchange-stats': exchangeOperations.getExchangeStats,
    'get-provider-mapping-stats': exchangeOperations.getProviderMappingStats,
    'enhanced-sync-status': exchangeOperations['enhanced-sync-status'],
  },
};
/**
 * Register the exchanges handler with the queue registry, wrapping every
 * operation so it receives the DI container as its second argument.
 */
export function initializeExchangesHandler(container: IServiceContainer) {
  logger.info('Registering exchanges handler...');
  // Rebind each raw operation into a job handler closed over the container.
  const wrappedOperations: Record<string, any> = {};
  for (const [operationName, operation] of Object.entries(exchangeOperations)) {
    wrappedOperations[operationName] = createJobHandler(async (payload: any) =>
      operation(payload, container)
    );
  }
  handlerRegistry.register(HANDLER_NAME, {
    ...exchangesHandlerConfig,
    operations: wrappedOperations,
  } as HandlerConfig);
  logger.info('Exchanges handler registered successfully');
}

View file

@ -0,0 +1,61 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-clear-postgresql-data');
/**
 * Wipe all synced PostgreSQL data (provider mappings, symbols, exchanges) and
 * reset the sync_status bookkeeping columns, reporting how many rows each
 * table held before deletion.
 *
 * Runs inside a single transaction, deleting child tables first so foreign-key
 * constraints are satisfied; any failure rolls the whole operation back.
 *
 * @param payload - job payload (unused; present for the handler signature)
 * @param container - DI container providing the PostgreSQL client
 * @returns per-table counts of the rows that were removed
 * @throws the original database error after attempting a rollback
 */
export async function clearPostgreSQLData(
  payload: JobPayload,
  container: IServiceContainer
): Promise<{
  exchangesCleared: number;
  symbolsCleared: number;
  mappingsCleared: number;
}> {
  logger.info('Clearing existing PostgreSQL data...');
  const postgresClient = container.postgres;
  let inTransaction = false;
  try {
    // Start transaction for atomic operations
    await postgresClient.query('BEGIN');
    inTransaction = true;
    // Capture row counts before deleting so the caller can report what was removed.
    const exchangeCountResult = await postgresClient.query(
      'SELECT COUNT(*) as count FROM exchanges'
    );
    const symbolCountResult = await postgresClient.query('SELECT COUNT(*) as count FROM symbols');
    const mappingCountResult = await postgresClient.query(
      'SELECT COUNT(*) as count FROM provider_mappings'
    );
    // Always pass an explicit radix to parseInt.
    const exchangesCleared = parseInt(exchangeCountResult.rows[0].count, 10);
    const symbolsCleared = parseInt(symbolCountResult.rows[0].count, 10);
    const mappingsCleared = parseInt(mappingCountResult.rows[0].count, 10);
    // Clear data in dependency order (children first) to respect foreign keys.
    await postgresClient.query('DELETE FROM provider_mappings');
    await postgresClient.query('DELETE FROM symbols');
    await postgresClient.query('DELETE FROM exchanges');
    // Reset sync bookkeeping so the next sync starts from a clean slate.
    await postgresClient.query(
      'UPDATE sync_status SET last_sync_at = NULL, last_sync_count = 0, sync_errors = NULL'
    );
    await postgresClient.query('COMMIT');
    inTransaction = false;
    logger.info('PostgreSQL data cleared successfully', {
      exchangesCleared,
      symbolsCleared,
      mappingsCleared,
    });
    return { exchangesCleared, symbolsCleared, mappingsCleared };
  } catch (error) {
    // Only roll back if BEGIN actually succeeded, and never let a rollback
    // failure mask the original error (the previous code issued ROLLBACK
    // unconditionally, which could throw and hide the real cause).
    if (inTransaction) {
      try {
        await postgresClient.query('ROLLBACK');
      } catch (rollbackError) {
        logger.error('Rollback failed after clear error', { rollbackError });
      }
    }
    logger.error('Failed to clear PostgreSQL data', { error });
    throw error;
  }
}

View file

@ -0,0 +1,29 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload, SyncStatus } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-status');
/**
 * Read every row of the sync_status table, aliased to camelCase field names,
 * ordered by provider then data type.
 */
export async function getSyncStatus(
  payload: JobPayload,
  container: IServiceContainer
): Promise<SyncStatus[]> {
  logger.info('Getting comprehensive sync status...');
  const statusQuery = `
    SELECT provider, data_type as "dataType", last_sync_at as "lastSyncAt",
           last_sync_count as "lastSyncCount", sync_errors as "syncErrors"
    FROM sync_status
    ORDER BY provider, data_type
  `;
  try {
    const { rows } = await container.postgres.query(statusQuery);
    logger.info(`Retrieved sync status for ${rows.length} entries`);
    return rows;
  } catch (error) {
    logger.error('Failed to get sync status', { error });
    throw error;
  }
}

View file

@ -0,0 +1,31 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-exchange-stats');
/**
 * Aggregate exchange statistics: total/active exchange counts plus distinct
 * country and currency counts, as a single row.
 */
export async function getExchangeStats(
  payload: JobPayload,
  container: IServiceContainer
): Promise<any> {
  logger.info('Getting exchange statistics...');
  const statsQuery = `
    SELECT
      COUNT(*) as total_exchanges,
      COUNT(CASE WHEN active = true THEN 1 END) as active_exchanges,
      COUNT(DISTINCT country) as countries,
      COUNT(DISTINCT currency) as currencies
    FROM exchanges
  `;
  try {
    const { rows } = await container.postgres.query(statsQuery);
    logger.info('Retrieved exchange statistics');
    return rows[0];
  } catch (error) {
    logger.error('Failed to get exchange statistics', { error });
    throw error;
  }
}

View file

@ -0,0 +1,19 @@
// Barrel for the exchange sync operations, aggregated into the map consumed by
// the exchanges queue handler.
import { clearPostgreSQLData } from './clear-postgresql-data.operations';
import { getSyncStatus } from './enhanced-sync-status.operations';
import { getExchangeStats } from './exchange-stats.operations';
import { getProviderMappingStats } from './provider-mapping-stats.operations';
import { syncQMExchanges } from './qm-exchanges.operations';
import { syncAllExchanges } from './sync-all-exchanges.operations';
import { syncIBExchanges } from './sync-ib-exchanges.operations';
import { syncQMProviderMappings } from './sync-qm-provider-mappings.operations';
export const exchangeOperations = {
  syncAllExchanges,
  syncQMExchanges,
  syncIBExchanges,
  syncQMProviderMappings,
  clearPostgreSQLData,
  getExchangeStats,
  getProviderMappingStats,
  // NOTE(review): kebab-case key differs from the camelCase keys above; it must
  // match the 'enhanced-sync-status' operation name used by the handler config.
  'enhanced-sync-status': getSyncStatus,
};

View file

@ -0,0 +1,35 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-provider-mapping-stats');
/**
 * Per-provider mapping statistics: totals, active/verified/auto-mapped counts,
 * and average confidence, one row per provider.
 */
export async function getProviderMappingStats(
  payload: JobPayload,
  container: IServiceContainer
): Promise<any> {
  logger.info('Getting provider mapping statistics...');
  const mappingStatsQuery = `
    SELECT
      provider,
      COUNT(*) as total_mappings,
      COUNT(CASE WHEN active = true THEN 1 END) as active_mappings,
      COUNT(CASE WHEN verified = true THEN 1 END) as verified_mappings,
      COUNT(CASE WHEN auto_mapped = true THEN 1 END) as auto_mapped,
      AVG(confidence) as avg_confidence
    FROM provider_exchange_mappings
    GROUP BY provider
    ORDER BY provider
  `;
  try {
    const { rows } = await container.postgres.query(mappingStatsQuery);
    logger.info('Retrieved provider mapping statistics');
    return rows;
  } catch (error) {
    logger.error('Failed to get provider mapping statistics', { error });
    throw error;
  }
}

View file

@ -0,0 +1,114 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('sync-qm-exchanges');
/**
 * Sync QuoteMedia exchange documents from MongoDB into the PostgreSQL
 * `exchanges` table, creating missing rows and refreshing existing ones.
 *
 * NOTE(review): per-exchange failures are logged and skipped, so the returned
 * `processed` is the number of documents fetched, not the number successfully
 * synced — created + updated may be less than processed.
 *
 * @returns counts of documents fetched, rows created, and rows updated
 * @throws when fetching from MongoDB or updating sync status fails
 */
export async function syncQMExchanges(
  payload: JobPayload,
  container: IServiceContainer
): Promise<{ processed: number; created: number; updated: number }> {
  logger.info('Starting QM exchanges sync...');
  try {
    const mongoClient = container.mongodb;
    const postgresClient = container.postgres;
    // 1. Get all QM exchanges from MongoDB
    const qmExchanges = await mongoClient.find('qmExchanges', {});
    logger.info(`Found ${qmExchanges.length} QM exchanges to process`);
    let created = 0;
    let updated = 0;
    for (const exchange of qmExchanges) {
      try {
        // 2. Check if exchange exists
        const existingExchange = await findExchange(exchange.exchangeCode, postgresClient);
        if (existingExchange) {
          // Update existing
          await updateExchange(existingExchange.id, exchange, postgresClient);
          updated++;
        } else {
          // Create new
          await createExchange(exchange, postgresClient);
          created++;
        }
      } catch (error) {
        // Per-document failure: log and continue with the remaining exchanges.
        logger.error('Failed to process exchange', { error, exchange: exchange.exchangeCode });
      }
    }
    // 3. Update sync status
    await updateSyncStatus('qm', 'exchanges', qmExchanges.length, postgresClient);
    const result = { processed: qmExchanges.length, created, updated };
    logger.info('QM exchanges sync completed', result);
    return result;
  } catch (error) {
    logger.error('QM exchanges sync failed', { error });
    throw error;
  }
}
// Helper functions
/** Look up an exchange row in PostgreSQL by its code; null when absent. */
async function findExchange(exchangeCode: string, postgresClient: any): Promise<any> {
  const { rows } = await postgresClient.query('SELECT * FROM exchanges WHERE code = $1', [
    exchangeCode,
  ]);
  return rows.length > 0 ? rows[0] : null;
}
/** Insert a new exchange row; duplicate codes are silently ignored via ON CONFLICT. */
async function createExchange(qmExchange: any, postgresClient: any): Promise<void> {
  const params = [
    qmExchange.exchangeCode || qmExchange.exchange,
    qmExchange.exchangeShortName || qmExchange.name,
    qmExchange.countryCode || 'US',
    'USD', // Default currency, can be improved
    true, // New exchanges are visible by default
  ];
  const insertSql = `
    INSERT INTO exchanges (code, name, country, currency, visible)
    VALUES ($1, $2, $3, $4, $5)
    ON CONFLICT (code) DO NOTHING
  `;
  await postgresClient.query(insertSql, params);
}
/**
 * Refresh mutable fields on an existing exchange row. COALESCE keeps the
 * stored value whenever the incoming one is NULL.
 */
async function updateExchange(
  exchangeId: string,
  qmExchange: any,
  postgresClient: any
): Promise<void> {
  const updateSql = `
    UPDATE exchanges
    SET name = COALESCE($2, name),
        country = COALESCE($3, country),
        updated_at = NOW()
    WHERE id = $1
  `;
  const displayName = qmExchange.exchangeShortName || qmExchange.name;
  await postgresClient.query(updateSql, [exchangeId, displayName, qmExchange.countryCode]);
}
/**
 * Record a successful sync run for (provider, dataType) in sync_status.
 * NOTE(review): plain UPDATE — silently no-ops if the (provider, data_type)
 * row does not exist; the sync-all variant upserts instead.
 */
async function updateSyncStatus(
  provider: string,
  dataType: string,
  count: number,
  postgresClient: any
): Promise<void> {
  const statusSql = `
    UPDATE sync_status
    SET last_sync_at = NOW(),
        last_sync_count = $3,
        sync_errors = NULL,
        updated_at = NOW()
    WHERE provider = $1 AND data_type = $2
  `;
  await postgresClient.query(statusSql, [provider, dataType, count]);
}

View file

@ -0,0 +1,282 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload, SyncResult } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-all-exchanges');
/**
 * Run the comprehensive multi-provider exchange sync (EOD then IB) into
 * PostgreSQL, optionally clearing existing data first, and record the run in
 * sync_status. The sync itself runs in one transaction.
 *
 * @param payload - may carry `clearFirst` (defaults to true when omitted)
 * @returns merged SyncResult counters across both providers
 * @throws the original error after attempting a rollback
 */
export async function syncAllExchanges(payload: JobPayload, container: IServiceContainer): Promise<SyncResult> {
  // BUG FIX: the previous `payload.clearFirst || true` always evaluated to
  // true (even for an explicit `false`), so the destructive clear could never
  // be disabled. `??` honors `false` while still defaulting to true.
  const clearFirst = payload.clearFirst ?? true;
  logger.info('Starting comprehensive exchange sync...', { clearFirst });
  const result: SyncResult = {
    processed: 0,
    created: 0,
    updated: 0,
    skipped: 0,
    errors: 0,
  };
  const postgresClient = container.postgres;
  let inTransaction = false;
  try {
    // Clear existing data if requested.
    // NOTE(review): the clear runs before BEGIN, i.e. outside the transaction
    // opened below — confirm that is intentional before relying on atomicity.
    if (clearFirst) {
      await clearPostgreSQLData(postgresClient);
    }
    // Start transaction for atomic operations
    await postgresClient.query('BEGIN');
    inTransaction = true;
    // 1. Sync from EOD exchanges (comprehensive global data)
    mergeResults(result, await syncEODExchanges(container));
    // 2. Sync from IB exchanges (detailed asset information)
    mergeResults(result, await syncIBExchanges(container));
    // 3. Update sync status
    await updateSyncStatus('all', 'exchanges', result.processed, postgresClient);
    await postgresClient.query('COMMIT');
    inTransaction = false;
    logger.info('Comprehensive exchange sync completed', result);
    return result;
  } catch (error) {
    // Roll back only if BEGIN succeeded; never let a rollback failure mask
    // the original error.
    if (inTransaction) {
      try {
        await postgresClient.query('ROLLBACK');
      } catch (rollbackError) {
        logger.error('Rollback failed after sync error', { rollbackError });
      }
    }
    logger.error('Comprehensive exchange sync failed', { error });
    throw error;
  }
}
/**
 * Delete all synced rows (mappings → symbols → exchanges, child tables first
 * to satisfy foreign keys) and reset the sync_status bookkeeping columns.
 */
async function clearPostgreSQLData(postgresClient: any): Promise<void> {
  logger.info('Clearing existing PostgreSQL data...');
  const statements = [
    'DELETE FROM provider_mappings',
    'DELETE FROM symbols',
    'DELETE FROM exchanges',
    'UPDATE sync_status SET last_sync_at = NULL, last_sync_count = 0, sync_errors = NULL',
  ];
  for (const sql of statements) {
    await postgresClient.query(sql);
  }
  logger.info('PostgreSQL data cleared successfully');
}
/** Build provider exchange mappings from the active EOD exchange documents. */
async function syncEODExchanges(container: IServiceContainer): Promise<SyncResult> {
  const result: SyncResult = { processed: 0, created: 0, updated: 0, skipped: 0, errors: 0 };
  const eodDocs = await container.mongodb.find('eodExchanges', { active: true });
  for (const doc of eodDocs) {
    try {
      await createProviderExchangeMapping(
        'eod', // provider
        doc.Code,
        doc.Name,
        doc.CountryISO2,
        doc.Currency,
        0.95, // very high confidence for EOD data
        container
      );
      result.processed++;
      result.created++; // Count as created mapping
    } catch (error) {
      logger.error('Failed to process EOD exchange', { error, exchange: doc });
      result.errors++;
    }
  }
  return result;
}
/** Build provider exchange mappings from all Interactive Brokers exchange documents. */
async function syncIBExchanges(container: IServiceContainer): Promise<SyncResult> {
  const result: SyncResult = { processed: 0, created: 0, updated: 0, skipped: 0, errors: 0 };
  const ibDocs = await container.mongodb.find('ibExchanges', {});
  for (const doc of ibDocs) {
    try {
      await createProviderExchangeMapping(
        'ib', // provider
        doc.exchange_id,
        doc.name,
        doc.country_code,
        'USD', // IB doesn't specify currency, default to USD
        0.85, // good confidence for IB data
        container
      );
      result.processed++;
      result.created++; // Count as created mapping
    } catch (error) {
      logger.error('Failed to process IB exchange', { error, exchange: doc });
      result.errors++;
    }
  }
  return result;
}
/**
 * Create a provider → master exchange mapping row if one does not already
 * exist. Existing mappings are never overwritten, so manual curation survives
 * re-syncs. When no master exchange matches the provider code, an inactive
 * placeholder exchange is created for later review.
 *
 * @param confidence - heuristic trust score stored on the mapping
 */
async function createProviderExchangeMapping(
  provider: string,
  providerExchangeCode: string,
  providerExchangeName: string,
  countryCode: string | null,
  currency: string | null,
  confidence: number,
  container: IServiceContainer
): Promise<void> {
  // Nothing to map without a provider code.
  if (!providerExchangeCode) {
    return;
  }
  const postgresClient = container.postgres;
  // Check if mapping already exists
  const existingMapping = await findProviderExchangeMapping(provider, providerExchangeCode, container);
  if (existingMapping) {
    // Don't override existing mappings to preserve manual work
    return;
  }
  // Find or create master exchange
  const masterExchange = await findOrCreateMasterExchange(
    providerExchangeCode,
    providerExchangeName,
    countryCode,
    currency,
    container
  );
  // Create the provider exchange mapping (inactive + auto_mapped, pending review)
  const query = `
    INSERT INTO provider_exchange_mappings
    (provider, provider_exchange_code, provider_exchange_name, master_exchange_id,
    country_code, currency, confidence, active, auto_mapped)
    VALUES ($1, $2, $3, $4, $5, $6, $7, false, true)
    ON CONFLICT (provider, provider_exchange_code) DO NOTHING
  `;
  await postgresClient.query(query, [
    provider,
    providerExchangeCode,
    providerExchangeName,
    masterExchange.id,
    countryCode,
    currency,
    confidence,
  ]);
}
/**
 * Resolve a provider code to a master exchange row: exact code match first,
 * then known provider-code aliases, and finally an inactive upserted
 * placeholder exchange for manual review.
 */
async function findOrCreateMasterExchange(
  providerCode: string,
  providerName: string,
  countryCode: string | null,
  currency: string | null,
  container: IServiceContainer
): Promise<any> {
  // 1) Exact code match against existing master exchanges.
  const exactMatch = await findExchangeByCode(providerCode, container);
  if (exactMatch) {
    return exactMatch;
  }
  // 2) Known provider-code aliases (e.g. NYE -> NYSE).
  const aliasCode = getBasicExchangeMapping(providerCode);
  if (aliasCode) {
    const aliasMatch = await findExchangeByCode(aliasCode, container);
    if (aliasMatch) {
      return aliasMatch;
    }
  }
  // 3) Nothing matched: upsert a new, inactive master exchange.
  const upsertSql = `
    INSERT INTO exchanges (code, name, country, currency, active)
    VALUES ($1, $2, $3, $4, false)
    ON CONFLICT (code) DO UPDATE SET
      name = COALESCE(EXCLUDED.name, exchanges.name),
      country = COALESCE(EXCLUDED.country, exchanges.country),
      currency = COALESCE(EXCLUDED.currency, exchanges.currency)
    RETURNING id, code, name, country, currency
  `;
  const { rows } = await container.postgres.query(upsertSql, [
    providerCode,
    providerName || providerCode,
    countryCode || 'US',
    currency || 'USD',
  ]);
  return rows[0];
}
/** Map well-known provider-specific exchange codes to canonical master codes. */
function getBasicExchangeMapping(providerCode: string): string | null {
  switch (providerCode.toUpperCase()) {
    case 'NYE':
      return 'NYSE';
    case 'NAS':
      return 'NASDAQ';
    case 'TO':
      return 'TSX';
    case 'LN':
    case 'LON':
      return 'LSE';
    default:
      return null;
  }
}
/** Fetch an existing provider exchange mapping row, or null when absent. */
async function findProviderExchangeMapping(
  provider: string,
  providerExchangeCode: string,
  container: IServiceContainer
): Promise<any> {
  const { rows } = await container.postgres.query(
    'SELECT * FROM provider_exchange_mappings WHERE provider = $1 AND provider_exchange_code = $2',
    [provider, providerExchangeCode]
  );
  return rows.length > 0 ? rows[0] : null;
}
/** Fetch a master exchange row by its code, or null when absent. */
async function findExchangeByCode(code: string, container: IServiceContainer): Promise<any> {
  const { rows } = await container.postgres.query('SELECT * FROM exchanges WHERE code = $1', [
    code,
  ]);
  return rows.length > 0 ? rows[0] : null;
}
/** Upsert the sync_status row for (provider, dataType) with a fresh timestamp and count. */
async function updateSyncStatus(
  provider: string,
  dataType: string,
  count: number,
  postgresClient: any
): Promise<void> {
  const upsertSql = `
    INSERT INTO sync_status (provider, data_type, last_sync_at, last_sync_count, sync_errors)
    VALUES ($1, $2, NOW(), $3, NULL)
    ON CONFLICT (provider, data_type)
    DO UPDATE SET
      last_sync_at = NOW(),
      last_sync_count = EXCLUDED.last_sync_count,
      sync_errors = NULL,
      updated_at = NOW()
  `;
  await postgresClient.query(upsertSql, [provider, dataType, count]);
}
/** Accumulate `source` counters into `target` in place. */
function mergeResults(target: SyncResult, source: SyncResult): void {
  for (const counter of ['processed', 'created', 'updated', 'skipped', 'errors'] as const) {
    target[counter] += source[counter];
  }
}

View file

@ -0,0 +1,209 @@
import { getLogger } from '@stock-bot/logger';
import type { MasterExchange } from '@stock-bot/mongodb';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('sync-ib-exchanges');
// Shape of a raw Interactive Brokers exchange document as read from the
// `ibExchanges` collection. All fields are optional because the documents are
// not uniform (see the fallbacks used throughout this file).
interface IBExchange {
  id?: string; // IB exchange identifier
  _id?: any; // MongoDB document id (fallback when `id` is missing)
  name?: string; // human-readable exchange name
  code?: string; // short exchange code
  country_code?: string; // assumed ISO-2 country code (e.g. 'US', 'CA') — TODO confirm
  currency?: string; // trading currency; defaulted to 'USD' when absent
}
/**
 * Sync US/CA Interactive Brokers exchanges from MongoDB into master exchange
 * records (1:1 via createOrUpdateMasterExchange).
 *
 * NOTE(review): a failure to fetch from MongoDB is swallowed — it is logged
 * and `{ syncedCount: 0, totalExchanges: 0 }` is returned instead of
 * throwing, so callers cannot distinguish "nothing to sync" from "sync
 * failed". Confirm this best-effort behavior is intended.
 *
 * @returns how many exchanges were synced out of how many were found
 */
export async function syncIBExchanges(
  payload: JobPayload,
  container: IServiceContainer
): Promise<{ syncedCount: number; totalExchanges: number }> {
  logger.info('Syncing IB exchanges from database...');
  try {
    const mongoClient = container.mongodb;
    const db = mongoClient.getDatabase();
    // Filter by country code US and CA
    const ibExchanges = await db
      .collection<IBExchange>('ibExchanges')
      .find({
        country_code: { $in: ['US', 'CA'] },
      })
      .toArray();
    logger.info('Found IB exchanges in database', { count: ibExchanges.length });
    let syncedCount = 0;
    for (const exchange of ibExchanges) {
      try {
        await createOrUpdateMasterExchange(exchange, container);
        syncedCount++;
        logger.debug('Synced IB exchange', {
          ibId: exchange.id,
          country: exchange.country_code,
        });
      } catch (error) {
        // Per-exchange failures are logged and skipped.
        logger.error('Failed to sync IB exchange', { exchange: exchange.id, error });
      }
    }
    logger.info('IB exchange sync completed', {
      syncedCount,
      totalExchanges: ibExchanges.length,
    });
    return { syncedCount, totalExchanges: ibExchanges.length };
  } catch (error) {
    logger.error('Failed to fetch IB exchanges from database', { error });
    return { syncedCount: 0, totalExchanges: 0 };
  }
}
/**
 * Create or update a master exchange record 1:1 from an IB exchange document.
 *
 * Existing records get their descriptive fields refreshed; new records are
 * created inactive (awaiting manual activation) but marked verified because
 * the mapping comes straight from IB.
 */
async function createOrUpdateMasterExchange(ibExchange: IBExchange, container: IServiceContainer): Promise<void> {
  const mongoClient = container.mongodb;
  const db = mongoClient.getDatabase();
  const collection = db.collection<MasterExchange>('masterExchanges');
  const masterExchangeId = generateMasterExchangeId(ibExchange);
  const now = new Date();
  // Check if master exchange already exists
  const existing = await collection.findOne({ masterExchangeId });
  if (existing) {
    // Update existing record — descriptive fields only; active/verified,
    // shortName, and sourceMappings are left untouched to preserve prior state.
    await collection.updateOne(
      { masterExchangeId },
      {
        $set: {
          officialName: ibExchange.name || `Exchange ${ibExchange.id}`,
          country: ibExchange.country_code || 'UNKNOWN',
          currency: ibExchange.currency || 'USD',
          timezone: inferTimezone(ibExchange),
          updated_at: now,
        },
      }
    );
    logger.debug('Updated existing master exchange', { masterExchangeId });
  } else {
    // Create new master exchange
    const masterExchange: MasterExchange = {
      masterExchangeId,
      shortName: masterExchangeId, // Set shortName to masterExchangeId on creation
      officialName: ibExchange.name || `Exchange ${ibExchange.id}`,
      country: ibExchange.country_code || 'UNKNOWN',
      currency: ibExchange.currency || 'USD',
      timezone: inferTimezone(ibExchange),
      active: false, // Set active to false only on creation
      sourceMappings: {
        ib: {
          id: ibExchange.id || ibExchange._id?.toString() || 'unknown',
          name: ibExchange.name || `Exchange ${ibExchange.id}`,
          code: ibExchange.code || ibExchange.id || '',
          aliases: generateAliases(ibExchange),
          lastUpdated: now,
        },
      },
      confidence: 1.0, // High confidence for direct IB mapping
      verified: true, // Mark as verified since it's direct from IB
      // DocumentBase fields
      source: 'ib-exchange-sync',
      created_at: now,
      updated_at: now,
    };
    await collection.insertOne(masterExchange);
    logger.debug('Created new master exchange', { masterExchangeId });
  }
}
/**
 * Derive a stable master exchange ID from an IB exchange document.
 * Preference order: explicit code, then IB id, then a name-derived slug
 * (first two words joined with '_'); 'UNKNOWN_EXCHANGE' as a last resort.
 */
function generateMasterExchangeId(ibExchange: IBExchange): string {
  const sanitize = (value: string): string => value.toUpperCase().replace(/[^A-Z0-9]/g, '');
  if (ibExchange.code) {
    return sanitize(ibExchange.code);
  }
  if (ibExchange.id) {
    return sanitize(ibExchange.id);
  }
  if (ibExchange.name) {
    return ibExchange.name
      .toUpperCase()
      .split(' ')
      .slice(0, 2)
      .join('_')
      .replace(/[^A-Z0-9_]/g, '');
  }
  return 'UNKNOWN_EXCHANGE';
}
/**
 * Generate lookup aliases for an IB exchange: the uppercase initialism of a
 * multi-word name (e.g. "New York Stock" -> "NYS") plus the uppercased code.
 */
function generateAliases(ibExchange: IBExchange): string[] {
  const aliases: string[] = [];
  const { name, code } = ibExchange;
  if (name && name.includes(' ')) {
    const initialism = name
      .split(' ')
      .map(word => word[0])
      .join('')
      .toUpperCase();
    aliases.push(initialism);
  }
  if (code) {
    aliases.push(code.toUpperCase());
  }
  return aliases;
}
/**
 * Infer an IANA timezone from keywords in the exchange name; 'UTC' when the
 * name is missing or matches no known venue. First matching rule wins.
 */
function inferTimezone(ibExchange: IBExchange): string {
  const upperName = ibExchange.name?.toUpperCase();
  if (!upperName) {
    return 'UTC';
  }
  const rules: Array<[string[], string]> = [
    [['NEW YORK', 'NYSE', 'NASDAQ'], 'America/New_York'],
    [['LONDON'], 'Europe/London'],
    [['TOKYO'], 'Asia/Tokyo'],
    [['SHANGHAI'], 'Asia/Shanghai'],
    [['TORONTO'], 'America/Toronto'],
    [['FRANKFURT'], 'Europe/Berlin'],
  ];
  for (const [keywords, zone] of rules) {
    if (keywords.some(keyword => upperName.includes(keyword))) {
      return zone;
    }
  }
  return 'UTC'; // Default
}

View file

@ -0,0 +1,216 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload, SyncResult } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-qm-provider-mappings');
/**
 * Build provider -> master exchange mappings for QuoteMedia (QM).
 *
 * Aggregates the distinct (exchangeCode, exchange, countryCode) triples in
 * the Mongo `qmSymbols` collection and creates an inactive, auto-mapped
 * provider_exchange_mappings row for each, inside a single Postgres
 * transaction. Existing mappings are never overwritten.
 *
 * @param payload   Job payload (unused by this operation).
 * @param container Service container providing `mongodb` and `postgres`.
 * @returns Aggregate counts; per-exchange failures increment `errors`
 *          without aborting the run.
 * @throws On any failure outside the per-exchange loop; the transaction is
 *         rolled back first.
 */
export async function syncQMProviderMappings(
  payload: JobPayload,
  container: IServiceContainer
): Promise<SyncResult> {
  logger.info('Starting QM provider exchange mappings sync...');
  const result: SyncResult = {
    processed: 0,
    created: 0,
    updated: 0,
    skipped: 0,
    errors: 0,
  };
  try {
    const mongoClient = container.mongodb;
    const postgresClient = container.postgres;
    // Start transaction
    await postgresClient.query('BEGIN');
    // Get unique exchange combinations from QM symbols
    const db = mongoClient.getDatabase();
    const pipeline = [
      {
        $group: {
          _id: {
            exchangeCode: '$exchangeCode',
            exchange: '$exchange',
            countryCode: '$countryCode',
          },
          count: { $sum: 1 },
          sampleExchange: { $first: '$exchange' },
        },
      },
      {
        $project: {
          exchangeCode: '$_id.exchangeCode',
          exchange: '$_id.exchange',
          countryCode: '$_id.countryCode',
          count: 1,
          sampleExchange: 1,
        },
      },
    ];
    const qmExchanges = await db.collection('qmSymbols').aggregate(pipeline).toArray();
    logger.info(`Found ${qmExchanges.length} unique QM exchange combinations`);
    for (const exchange of qmExchanges) {
      try {
        // Create provider exchange mapping for QM
        await createProviderExchangeMapping(
          'qm', // provider
          exchange.exchangeCode,
          exchange.sampleExchange || exchange.exchangeCode,
          exchange.countryCode,
          exchange.countryCode === 'CA' ? 'CAD' : 'USD', // Simple currency mapping
          0.8, // good confidence for QM data
          container
        );
        result.processed++;
        result.created++;
      } catch (error) {
        // One bad exchange must not abort the whole sync.
        logger.error('Failed to process QM exchange mapping', { error, exchange });
        result.errors++;
      }
    }
    await postgresClient.query('COMMIT');
    logger.info('QM provider exchange mappings sync completed', result);
    return result;
  } catch (error) {
    // ROLLBACK can itself throw (e.g. lost connection, or BEGIN never ran);
    // guard it so the ORIGINAL error is the one propagated, not the rollback's.
    try {
      await container.postgres.query('ROLLBACK');
    } catch (rollbackError) {
      logger.warn('Rollback failed after sync error', { rollbackError });
    }
    logger.error('QM provider exchange mappings sync failed', { error });
    throw error;
  }
}
/**
 * Create a provider -> master exchange mapping row, unless one already
 * exists (existing rows are preserved to protect manual curation).
 * New mappings are inserted inactive and flagged as auto-mapped so a
 * human can review them before activation.
 */
async function createProviderExchangeMapping(
  provider: string,
  providerExchangeCode: string,
  providerExchangeName: string,
  countryCode: string | null,
  currency: string | null,
  confidence: number,
  container: IServiceContainer
): Promise<void> {
  if (!providerExchangeCode) {
    return;
  }

  // Existing mappings win: never overwrite manual work.
  const existing = await findProviderExchangeMapping(provider, providerExchangeCode, container);
  if (existing) {
    return;
  }

  // Resolve (or lazily create) the master exchange this code maps to.
  const masterExchange = await findOrCreateMasterExchange(
    providerExchangeCode,
    providerExchangeName,
    countryCode,
    currency,
    container
  );

  // ON CONFLICT guards against a concurrent insert of the same mapping.
  const query = `
    INSERT INTO provider_exchange_mappings
    (provider, provider_exchange_code, provider_exchange_name, master_exchange_id,
     country_code, currency, confidence, active, auto_mapped)
    VALUES ($1, $2, $3, $4, $5, $6, $7, false, true)
    ON CONFLICT (provider, provider_exchange_code) DO NOTHING
  `;
  await container.postgres.query(query, [
    provider,
    providerExchangeCode,
    providerExchangeName,
    masterExchange.id,
    countryCode,
    currency,
    confidence,
  ]);
}
/**
 * Look up an existing provider exchange mapping.
 * Resolves to null when no row matches the (provider, code) pair.
 */
async function findProviderExchangeMapping(
  provider: string,
  providerExchangeCode: string,
  container: IServiceContainer
): Promise<any> {
  const sql =
    'SELECT * FROM provider_exchange_mappings WHERE provider = $1 AND provider_exchange_code = $2';
  const { rows } = await container.postgres.query(sql, [provider, providerExchangeCode]);
  return rows.length > 0 ? rows[0] : null;
}
/**
 * Resolve the master exchange for a provider code, creating an inactive
 * placeholder row when neither the code nor its known alias matches.
 */
async function findOrCreateMasterExchange(
  providerCode: string,
  providerName: string,
  countryCode: string | null,
  currency: string | null,
  container: IServiceContainer
): Promise<any> {
  // 1) Exact code match.
  const direct = await findExchangeByCode(providerCode, container);
  if (direct) {
    return direct;
  }

  // 2) Known alias (e.g. NYE -> NYSE).
  const aliasCode = getBasicExchangeMapping(providerCode);
  if (aliasCode) {
    const aliased = await findExchangeByCode(aliasCode, container);
    if (aliased) {
      return aliased;
    }
  }

  // 3) Create a new, inactive master exchange. The upsert only fills in
  //    columns that are still missing on a pre-existing row.
  const query = `
    INSERT INTO exchanges (code, name, country, currency, active)
    VALUES ($1, $2, $3, $4, false)
    ON CONFLICT (code) DO UPDATE SET
      name = COALESCE(EXCLUDED.name, exchanges.name),
      country = COALESCE(EXCLUDED.country, exchanges.country),
      currency = COALESCE(EXCLUDED.currency, exchanges.currency)
    RETURNING id, code, name, country, currency
  `;
  const inserted = await container.postgres.query(query, [
    providerCode,
    providerName || providerCode,
    countryCode || 'US',
    currency || 'USD',
  ]);
  return inserted.rows[0];
}
/**
 * Map well-known provider exchange abbreviations to canonical codes.
 * Returns null when the code has no known alias.
 */
function getBasicExchangeMapping(providerCode: string): string | null {
  switch (providerCode.toUpperCase()) {
    case 'NYE':
      return 'NYSE';
    case 'NAS':
      return 'NASDAQ';
    case 'TO':
      return 'TSX';
    case 'LN':
    case 'LON':
      return 'LSE';
    default:
      return null;
  }
}
/** Fetch a master exchange row by its canonical code (null when absent). */
async function findExchangeByCode(code: string, container: IServiceContainer): Promise<any> {
  const sql = 'SELECT * FROM exchanges WHERE code = $1';
  const { rows } = await container.postgres.query(sql, [code]);
  return rows.length > 0 ? rows[0] : null;
}

View file

@ -0,0 +1,33 @@
/**
* Handler initialization for data pipeline service
* Registers all handlers with the service container
*/
import type { ServiceContainer } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
import { initializeExchangesHandler } from './exchanges/exchanges.handler';
import { initializeSymbolsHandler } from './symbols/symbols.handler';
const logger = getLogger('pipeline-handler-init');
/**
 * Initialize all handlers with the service container.
 *
 * Registers the exchanges and symbols handlers in order; any failure is
 * logged and rethrown so the service fails fast at startup.
 */
export async function initializeAllHandlers(container: ServiceContainer): Promise<void> {
  logger.info('Initializing data pipeline handlers...');
  try {
    // Table of registration steps; each logs the same message the previous
    // implementation emitted after its handler was wired up.
    const registrations: Array<{ message: string; register: (c: ServiceContainer) => void }> = [
      { message: 'Exchanges handler initialized', register: initializeExchangesHandler },
      { message: 'Symbols handler initialized', register: initializeSymbolsHandler },
    ];
    for (const { message, register } of registrations) {
      register(container);
      logger.debug(message);
    }
    logger.info('All pipeline handlers initialized successfully');
  } catch (error) {
    logger.error('Failed to initialize handlers', { error });
    throw error;
  }
}

View file

@ -0,0 +1,9 @@
import { syncQMSymbols } from './qm-symbols.operations';
import { getSyncStatus } from './sync-status.operations';
import { syncSymbolsFromProvider } from './sync-symbols-from-provider.operations';
// Aggregated symbol operations keyed by function name; consumed by the
// symbols handler when wiring queue operations to implementations.
export const symbolOperations = {
  syncQMSymbols,
  syncSymbolsFromProvider,
  getSyncStatus,
};

View file

@ -0,0 +1,184 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('sync-qm-symbols');
/**
 * Sync QM symbols from MongoDB into PostgreSQL.
 *
 * For each QM symbol document: resolve its exchange to a Postgres exchange
 * id, then insert or update the `symbols` row and upsert the matching
 * `provider_mappings` row.
 *
 * @param payload   Job payload (not read by this operation).
 * @param container Service container providing `mongodb` and `postgres` clients.
 * @returns Counts; `processed` is the total fetched, so
 *          processed >= created + updated — skipped exchanges and
 *          per-symbol errors account for the difference.
 * @throws Rethrows failures outside the per-symbol loop; per-symbol errors
 *         are logged and swallowed so one bad document cannot abort the run.
 */
export async function syncQMSymbols(
  payload: JobPayload,
  container: IServiceContainer
): Promise<{ processed: number; created: number; updated: number }> {
  logger.info('Starting QM symbols sync...');
  try {
    const mongoClient = container.mongodb;
    const postgresClient = container.postgres;
    // 1. Get all QM symbols from MongoDB
    const qmSymbols = await mongoClient.find('qmSymbols', {});
    logger.info(`Found ${qmSymbols.length} QM symbols to process`);
    let created = 0;
    let updated = 0;
    for (const symbol of qmSymbols) {
      try {
        // 2. Resolve exchange (null for codes outside the known mapping)
        const exchangeId = await resolveExchange(
          symbol.exchangeCode || symbol.exchange,
          postgresClient
        );
        if (!exchangeId) {
          // Skipped symbols are not counted separately in the result.
          logger.warn('Unknown exchange, skipping symbol', {
            symbol: symbol.symbol,
            exchange: symbol.exchangeCode || symbol.exchange,
          });
          continue;
        }
        // 3. Check if symbol exists
        const existingSymbol = await findSymbol(symbol.symbol, exchangeId, postgresClient);
        if (existingSymbol) {
          // Update existing
          await updateSymbol(existingSymbol.id, symbol, postgresClient);
          await upsertProviderMapping(existingSymbol.id, 'qm', symbol, postgresClient);
          updated++;
        } else {
          // Create new
          const newSymbolId = await createSymbol(symbol, exchangeId, postgresClient);
          await upsertProviderMapping(newSymbolId, 'qm', symbol, postgresClient);
          created++;
        }
      } catch (error) {
        // Per-symbol failures are deliberately swallowed after logging.
        logger.error('Failed to process symbol', { error, symbol: symbol.symbol });
      }
    }
    // 4. Update sync status
    await updateSyncStatus('qm', 'symbols', qmSymbols.length, postgresClient);
    const result = { processed: qmSymbols.length, created, updated };
    logger.info('QM symbols sync completed', result);
    return result;
  } catch (error) {
    logger.error('QM symbols sync failed', { error });
    throw error;
  }
}
// Helper functions
/**
 * Map a provider exchange code to the Postgres exchange id.
 * Empty or unrecognised codes resolve to null so callers can skip the symbol.
 */
async function resolveExchange(exchangeCode: string, postgresClient: any): Promise<string | null> {
  if (!exchangeCode) {
    return null;
  }
  // Simple mapping - expand this as needed
  const exchangeMap: Record<string, string> = {
    NASDAQ: 'NASDAQ',
    NYSE: 'NYSE',
    TSX: 'TSX',
    TSE: 'TSX', // TSE maps to TSX
    LSE: 'LSE',
    CME: 'CME',
  };
  const canonicalCode = exchangeMap[exchangeCode.toUpperCase()];
  if (!canonicalCode) {
    return null;
  }
  const lookup = await postgresClient.query('SELECT id FROM exchanges WHERE code = $1', [
    canonicalCode,
  ]);
  return lookup.rows[0]?.id || null;
}
/** Fetch the symbols row matching (symbol, exchange_id); null when absent. */
async function findSymbol(symbol: string, exchangeId: string, postgresClient: any): Promise<any> {
  const { rows } = await postgresClient.query(
    'SELECT * FROM symbols WHERE symbol = $1 AND exchange_id = $2',
    [symbol, exchangeId]
  );
  return rows.length > 0 ? rows[0] : null;
}
/**
 * Insert a new symbols row from a QM document and return the new row id.
 * Missing company/country/currency fields fall back to name / 'US' / 'USD'.
 */
async function createSymbol(
  qmSymbol: any,
  exchangeId: string,
  postgresClient: any
): Promise<string> {
  const insertSql = `
    INSERT INTO symbols (symbol, exchange_id, company_name, country, currency)
    VALUES ($1, $2, $3, $4, $5)
    RETURNING id
  `;
  const params = [
    qmSymbol.symbol,
    exchangeId,
    qmSymbol.companyName || qmSymbol.name,
    qmSymbol.countryCode || 'US',
    qmSymbol.currency || 'USD',
  ];
  const inserted = await postgresClient.query(insertSql, params);
  return inserted.rows[0].id;
}
/**
 * Refresh mutable columns of an existing symbols row. COALESCE keeps the
 * stored value whenever the incoming field is null/undefined.
 */
async function updateSymbol(symbolId: string, qmSymbol: any, postgresClient: any): Promise<void> {
  const updateSql = `
    UPDATE symbols
    SET company_name = COALESCE($2, company_name),
        country = COALESCE($3, country),
        currency = COALESCE($4, currency),
        updated_at = NOW()
    WHERE id = $1
  `;
  const params = [
    symbolId,
    qmSymbol.companyName || qmSymbol.name,
    qmSymbol.countryCode,
    qmSymbol.currency,
  ];
  await postgresClient.query(updateSql, params);
}
/**
 * Insert or refresh the provider_mappings row that links a provider's
 * native symbol to our symbols row. On conflict the mapping is re-pointed
 * at the given symbol and its last_seen timestamp is refreshed.
 */
async function upsertProviderMapping(
  symbolId: string,
  provider: string,
  qmSymbol: any,
  postgresClient: any
): Promise<void> {
  const upsertSql = `
    INSERT INTO provider_mappings
    (symbol_id, provider, provider_symbol, provider_exchange, last_seen)
    VALUES ($1, $2, $3, $4, NOW())
    ON CONFLICT (provider, provider_symbol)
    DO UPDATE SET
      symbol_id = EXCLUDED.symbol_id,
      provider_exchange = EXCLUDED.provider_exchange,
      last_seen = NOW()
  `;
  const params = [
    symbolId,
    provider,
    qmSymbol.qmSearchCode || qmSymbol.symbol,
    qmSymbol.exchangeCode || qmSymbol.exchange,
  ];
  await postgresClient.query(upsertSql, params);
}
/**
 * Record a successful sync for (provider, dataType).
 *
 * Uses an upsert — consistent with the enhanced sync module's version —
 * so the status is recorded even when no sync_status row has been seeded
 * yet. The previous plain UPDATE silently no-oped for first-time
 * providers, leaving the sync unrecorded.
 */
async function updateSyncStatus(
  provider: string,
  dataType: string,
  count: number,
  postgresClient: any
): Promise<void> {
  const query = `
    INSERT INTO sync_status (provider, data_type, last_sync_at, last_sync_count, sync_errors)
    VALUES ($1, $2, NOW(), $3, NULL)
    ON CONFLICT (provider, data_type)
    DO UPDATE SET
      last_sync_at = NOW(),
      last_sync_count = EXCLUDED.last_sync_count,
      sync_errors = NULL,
      updated_at = NOW()
  `;
  await postgresClient.query(query, [provider, dataType, count]);
}

View file

@ -0,0 +1,24 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('sync-status');
/**
 * Fetch every sync_status row, ordered by provider then data type.
 * Used by the 'sync-status' queue operation to report pipeline state.
 */
export async function getSyncStatus(
  payload: JobPayload,
  container: IServiceContainer
): Promise<Record<string, unknown>[]> {
  logger.info('Getting sync status...');
  try {
    const { rows } = await container.postgres.query(
      'SELECT * FROM sync_status ORDER BY provider, data_type'
    );
    logger.info(`Retrieved sync status for ${rows.length} entries`);
    return rows;
  } catch (error) {
    logger.error('Failed to get sync status', { error });
    throw error;
  }
}

View file

@ -0,0 +1,237 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import type { JobPayload, SyncResult } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-symbols-from-provider');
/**
 * Sync symbols from a single provider's MongoDB collection into Postgres.
 *
 * Supported providers: 'qm', 'eod', 'ib' (selects the corresponding Mongo
 * collection). When `payload.clearFirst` is true, all existing symbols and
 * provider mappings are deleted first (exchanges are kept). Rows are then
 * created/updated inside a single transaction.
 *
 * @param payload   Must contain `provider`; may contain `clearFirst`.
 * @param container Service container providing `mongodb` and `postgres`.
 * @returns Aggregate counts; per-symbol failures increment `errors`
 *          without aborting the run.
 * @throws When the provider is missing/unsupported, or a failure occurs
 *         outside the per-symbol loop (the transaction is rolled back).
 */
export async function syncSymbolsFromProvider(
  payload: JobPayload,
  container: IServiceContainer
): Promise<SyncResult> {
  const provider = payload.provider;
  const clearFirst = payload.clearFirst || false;
  if (!provider) {
    throw new Error('Provider is required in payload');
  }
  logger.info(`Starting ${provider} symbols sync...`, { clearFirst });
  const result: SyncResult = {
    processed: 0,
    created: 0,
    updated: 0,
    skipped: 0,
    errors: 0,
  };
  try {
    const mongoClient = container.mongodb;
    const postgresClient = container.postgres;
    // Clear existing data if requested (only symbols and mappings, keep exchanges)
    if (clearFirst) {
      await postgresClient.query('BEGIN');
      await postgresClient.query('DELETE FROM provider_mappings');
      await postgresClient.query('DELETE FROM symbols');
      await postgresClient.query('COMMIT');
      logger.info('Cleared existing symbols and mappings before sync');
    }
    // Start transaction for the sync itself
    await postgresClient.query('BEGIN');
    let symbols: Record<string, unknown>[] = [];
    // Get symbols based on provider
    const db = mongoClient.getDatabase();
    switch (provider.toLowerCase()) {
      case 'qm':
        symbols = await db.collection('qmSymbols').find({}).toArray();
        break;
      case 'eod':
        symbols = await db.collection('eodSymbols').find({}).toArray();
        break;
      case 'ib':
        symbols = await db.collection('ibSymbols').find({}).toArray();
        break;
      default:
        throw new Error(`Unsupported provider: ${provider}`);
    }
    logger.info(`Found ${symbols.length} ${provider} symbols to process`);
    result.processed = symbols.length;
    for (const symbol of symbols) {
      try {
        await processSingleSymbol(symbol, provider, result, container);
      } catch (error) {
        // One bad document must not abort the whole sync.
        logger.error('Failed to process symbol', {
          error,
          symbol: symbol.symbol || symbol.code,
          provider,
        });
        result.errors++;
      }
    }
    // Update sync status
    await updateSyncStatus(provider, 'symbols', result.processed, container.postgres);
    await postgresClient.query('COMMIT');
    logger.info(`${provider} symbols sync completed`, result);
    return result;
  } catch (error) {
    // ROLLBACK can itself fail (e.g. the connection died, or no transaction
    // is open because the error happened before BEGIN); never let that
    // failure mask the original error.
    try {
      await container.postgres.query('ROLLBACK');
    } catch (rollbackError) {
      logger.warn('Rollback failed after sync error', { rollbackError });
    }
    logger.error(`${provider} symbols sync failed`, { error });
    throw error;
  }
}
async function processSingleSymbol(
symbol: any,
provider: string,
result: SyncResult,
container: IServiceContainer
): Promise<void> {
const symbolCode = symbol.symbol || symbol.code;
const exchangeCode = symbol.exchangeCode || symbol.exchange || symbol.exchange_id;
if (!symbolCode || !exchangeCode) {
result.skipped++;
return;
}
// Find active provider exchange mapping
const providerMapping = await findActiveProviderExchangeMapping(provider, exchangeCode, container);
if (!providerMapping) {
result.skipped++;
return;
}
// Check if symbol exists
const existingSymbol = await findSymbolByCodeAndExchange(
symbolCode,
providerMapping.master_exchange_id,
container
);
if (existingSymbol) {
await updateSymbol(existingSymbol.id, symbol, container);
await upsertProviderMapping(existingSymbol.id, provider, symbol, container);
result.updated++;
} else {
const newSymbolId = await createSymbol(symbol, providerMapping.master_exchange_id, container);
await upsertProviderMapping(newSymbolId, provider, symbol, container);
result.created++;
}
}
/**
 * Find the ACTIVE mapping from a provider exchange code to a master
 * exchange, joined with the master exchange's canonical code.
 * Resolves to null when no active mapping exists.
 */
async function findActiveProviderExchangeMapping(
  provider: string,
  providerExchangeCode: string,
  container: IServiceContainer
): Promise<any> {
  const sql = `
    SELECT pem.*, e.code as master_exchange_code
    FROM provider_exchange_mappings pem
    JOIN exchanges e ON pem.master_exchange_id = e.id
    WHERE pem.provider = $1 AND pem.provider_exchange_code = $2 AND pem.active = true
  `;
  const { rows } = await container.postgres.query(sql, [provider, providerExchangeCode]);
  return rows.length > 0 ? rows[0] : null;
}
/** Fetch the symbols row for (symbol, exchange_id); null when absent. */
async function findSymbolByCodeAndExchange(
  symbol: string,
  exchangeId: string,
  container: IServiceContainer
): Promise<any> {
  const sql = 'SELECT * FROM symbols WHERE symbol = $1 AND exchange_id = $2';
  const { rows } = await container.postgres.query(sql, [symbol, exchangeId]);
  return rows.length > 0 ? rows[0] : null;
}
/**
 * Insert a new symbols row from a provider document and return the new id.
 * Falls back across the field-name variants used by the different
 * providers (symbol/code, companyName/name/company_name, …).
 */
async function createSymbol(
  symbol: any,
  exchangeId: string,
  container: IServiceContainer
): Promise<string> {
  const insertSql = `
    INSERT INTO symbols (symbol, exchange_id, company_name, country, currency)
    VALUES ($1, $2, $3, $4, $5)
    RETURNING id
  `;
  const params = [
    symbol.symbol || symbol.code,
    exchangeId,
    symbol.companyName || symbol.name || symbol.company_name,
    symbol.countryCode || symbol.country_code || 'US',
    symbol.currency || 'USD',
  ];
  const inserted = await container.postgres.query(insertSql, params);
  return inserted.rows[0].id;
}
/**
 * Refresh mutable columns of an existing symbols row. COALESCE keeps the
 * stored value whenever the incoming field is null/undefined.
 */
async function updateSymbol(
  symbolId: string,
  symbol: any,
  container: IServiceContainer
): Promise<void> {
  const updateSql = `
    UPDATE symbols
    SET company_name = COALESCE($2, company_name),
        country = COALESCE($3, country),
        currency = COALESCE($4, currency),
        updated_at = NOW()
    WHERE id = $1
  `;
  const params = [
    symbolId,
    symbol.companyName || symbol.name || symbol.company_name,
    symbol.countryCode || symbol.country_code,
    symbol.currency,
  ];
  await container.postgres.query(updateSql, params);
}
/**
 * Insert or refresh the provider_mappings row linking a provider's native
 * symbol to our symbols row; on conflict the mapping is re-pointed and
 * its last_seen timestamp refreshed.
 */
async function upsertProviderMapping(
  symbolId: string,
  provider: string,
  symbol: any,
  container: IServiceContainer
): Promise<void> {
  const upsertSql = `
    INSERT INTO provider_mappings
    (symbol_id, provider, provider_symbol, provider_exchange, last_seen)
    VALUES ($1, $2, $3, $4, NOW())
    ON CONFLICT (provider, provider_symbol)
    DO UPDATE SET
      symbol_id = EXCLUDED.symbol_id,
      provider_exchange = EXCLUDED.provider_exchange,
      last_seen = NOW()
  `;
  const params = [
    symbolId,
    provider,
    symbol.qmSearchCode || symbol.symbol || symbol.code,
    symbol.exchangeCode || symbol.exchange || symbol.exchange_id,
  ];
  await container.postgres.query(upsertSql, params);
}
/**
 * Upsert the sync_status row for (provider, dataType): records the time
 * and size of the latest successful sync and clears any stored errors.
 */
async function updateSyncStatus(
  provider: string,
  dataType: string,
  count: number,
  postgresClient: any
): Promise<void> {
  const upsertSql = `
    INSERT INTO sync_status (provider, data_type, last_sync_at, last_sync_count, sync_errors)
    VALUES ($1, $2, NOW(), $3, NULL)
    ON CONFLICT (provider, data_type)
    DO UPDATE SET
      last_sync_at = NOW(),
      last_sync_count = EXCLUDED.last_sync_count,
      sync_errors = NULL,
      updated_at = NOW()
  `;
  const params = [provider, dataType, count];
  await postgresClient.query(upsertSql, params);
}

View file

@ -0,0 +1,56 @@
import { getLogger } from '@stock-bot/logger';
import { handlerRegistry, createJobHandler, type HandlerConfig, type ScheduledJobConfig } from '@stock-bot/queue';
import type { ServiceContainer } from '@stock-bot/di';
import { symbolOperations } from './operations';
const logger = getLogger('symbols-handler');
const HANDLER_NAME = 'symbols';
/**
 * Static handler configuration for the 'symbols' queue.
 * The operation keys below are the names jobs are enqueued with; they must
 * stay in sync with the routes and scheduled jobs that enqueue them.
 */
const symbolsHandlerConfig: HandlerConfig = {
  concurrency: 1, // symbol syncs touch shared tables; run one job at a time
  maxAttempts: 3,
  scheduledJobs: [
    {
      operation: 'sync-qm-symbols',
      cronPattern: '0 2 * * *', // Daily at 2 AM
      payload: {},
      priority: 5,
      immediately: false,
    } as ScheduledJobConfig,
    {
      operation: 'sync-symbols-qm',
      cronPattern: '0 4 * * *', // Daily at 4 AM
      payload: { provider: 'qm', clearFirst: false },
      priority: 5,
      immediately: false,
    } as ScheduledJobConfig,
  ],
  operations: {
    // The three provider-specific operations all route to the same
    // implementation, which reads `payload.provider` to pick a collection.
    'sync-qm-symbols': symbolOperations.syncQMSymbols,
    'sync-symbols-qm': symbolOperations.syncSymbolsFromProvider,
    'sync-symbols-eod': symbolOperations.syncSymbolsFromProvider,
    'sync-symbols-ib': symbolOperations.syncSymbolsFromProvider,
    'sync-status': symbolOperations.getSyncStatus,
  },
};
/**
 * Register the symbols handler, binding each queue operation to the
 * service container.
 *
 * The container-aware wrappers are built from
 * `symbolsHandlerConfig.operations` (kebab-case operation names such as
 * 'sync-qm-symbols'), NOT from `symbolOperations` (camelCase function
 * names). Wrapping the latter — as the previous implementation did —
 * registered the operations under the wrong keys, so scheduled jobs and
 * routes enqueueing 'sync-qm-symbols' etc. could never find a matching
 * operation.
 */
export function initializeSymbolsHandler(container: ServiceContainer): void {
  logger.info('Registering symbols handler...');
  // Bind every configured operation to the container, preserving its key.
  const containerAwareOperations = Object.entries(symbolsHandlerConfig.operations).reduce(
    (acc, [operationName, operation]) => {
      acc[operationName] = createJobHandler(async (payload: any) => {
        return operation(payload, container);
      });
      return acc;
    },
    {} as Record<string, any>
  );
  const symbolsHandlerConfigWithContainer: HandlerConfig = {
    ...symbolsHandlerConfig,
    operations: containerAwareOperations,
  };
  handlerRegistry.register(HANDLER_NAME, symbolsHandlerConfigWithContainer);
  logger.info('Symbols handler registered successfully');
}

View file

@ -0,0 +1,80 @@
/**
* Data Pipeline Service
* Simplified entry point using ServiceApplication framework
*/
import { initializeStockConfig } from '@stock-bot/stock-config';
import {
ServiceApplication,
createServiceContainerFromConfig,
initializeServices as initializeAwilixServices,
} from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
// Local imports
import { initializeAllHandlers } from './handlers';
import { createRoutes } from './routes/create-routes';
import { setupServiceContainer } from './container-setup';
// Initialize configuration with service-specific overrides
const config = initializeStockConfig('dataPipeline');
// NOTE(review): this dumps the FULL resolved config (potentially including
// database credentials) to stdout — consider redacting before logging.
console.log('Data Pipeline Service Configuration:', JSON.stringify(config, null, 2));
// Create service application
// Configures the data-pipeline HTTP service: permissive CORS, queue
// handlers and scheduled jobs enabled, plus basic service metadata.
const app = new ServiceApplication(
  config,
  {
    serviceName: 'data-pipeline',
    enableHandlers: true,
    enableScheduledJobs: true,
    corsConfig: {
      origin: '*',
      allowMethods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS', 'PATCH'],
      allowHeaders: ['Content-Type', 'Authorization'],
      credentials: false,
    },
    serviceMetadata: {
      version: '1.0.0',
      description: 'Data processing and transformation pipeline',
      endpoints: {
        health: '/health',
        operations: '/api/operations',
      },
    },
  },
  {
    // Custom lifecycle hooks
    // NOTE(review): this hook returns the enhanced container — assumes
    // ServiceApplication adopts the hook's return value; confirm the
    // framework actually uses it rather than the original container.
    onContainerReady: (container) => {
      // Setup service-specific configuration
      const enhancedContainer = setupServiceContainer(config, container);
      return enhancedContainer;
    },
    // 'port' is currently unused in this hook.
    onStarted: (port) => {
      const logger = getLogger('data-pipeline');
      logger.info('Data pipeline service startup initiated with ServiceApplication framework');
    },
  }
);
// Container factory function
/**
 * Build and initialize the Awilix service container for this service.
 * The data pipeline enables every datastore (Mongo, Postgres, cache,
 * queue) but not browser/proxy services.
 * NOTE(review): reads config.database.questdb — assumes config.database is
 * always present; confirm stock-config defaults guarantee this.
 */
async function createContainer(config: any) {
  const container = createServiceContainerFromConfig(config, {
    enableQuestDB: config.database.questdb?.enabled || false,
    // Data pipeline needs all databases
    enableMongoDB: true,
    enablePostgres: true,
    enableCache: true,
    enableQueue: true,
    enableBrowser: false, // Data pipeline doesn't need browser
    enableProxy: false, // Data pipeline doesn't need proxy
  });
  await initializeAwilixServices(container);
  return container;
}
// Start the service
// Wires the container factory, route factory and handler initializer
// together; any startup failure is fatal and exits the process.
app.start(createContainer, createRoutes, initializeAllHandlers).catch(error => {
  const logger = getLogger('data-pipeline');
  logger.fatal('Failed to start data pipeline service', { error });
  process.exit(1);
});

View file

@ -0,0 +1,29 @@
/**
* Route factory for data pipeline service
* Creates routes with access to the service container
*/
import { Hono } from 'hono';
import type { IServiceContainer } from '@stock-bot/handlers';
import { healthRoutes } from './health.routes';
import { createSyncRoutes } from './sync.routes';
import { createEnhancedSyncRoutes } from './enhanced-sync.routes';
import { createStatsRoutes } from './stats.routes';
/**
 * Build the root router for the data pipeline service.
 * Stores the service container on every request context (key 'container')
 * and mounts the health, sync, enhanced-sync and stats sub-routers.
 */
export function createRoutes(container: IServiceContainer): Hono {
  const app = new Hono();
  // Add container to context for all routes
  app.use('*', async (c, next) => {
    c.set('container', container);
    await next();
  });
  // Mount routes
  // NOTE(review): '/sync' is mounted twice (basic + enhanced routers);
  // this relies on Hono merging sub-apps on the same path — confirm the
  // two routers expose no conflicting endpoints.
  app.route('/health', healthRoutes);
  app.route('/sync', createSyncRoutes(container));
  app.route('/sync', createEnhancedSyncRoutes(container));
  app.route('/sync/stats', createStatsRoutes(container));
  return app;
}

View file

@ -0,0 +1,154 @@
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
const logger = getLogger('enhanced-sync-routes');
/**
 * Route factory for enhanced sync operations.
 *
 * Every endpoint is fire-and-forget: it enqueues a job on the relevant
 * queue and immediately returns the job id; results must be retrieved via
 * the job/queue APIs. All endpoints answer 503 when the queue manager is
 * absent from the container, and 500 with the error message on enqueue
 * failure.
 */
export function createEnhancedSyncRoutes(container: IServiceContainer) {
  const enhancedSync = new Hono();
  // Enhanced sync endpoints

  // POST /exchanges/all?clear=true — full exchange re-sync, optionally
  // clearing existing rows first.
  enhancedSync.post('/exchanges/all', async c => {
    try {
      const clearFirst = c.req.query('clear') === 'true';
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ success: false, error: 'Queue manager not available' }, 503);
      }
      const exchangesQueue = queueManager.getQueue('exchanges');
      const job = await exchangesQueue.addJob('sync-all-exchanges', {
        handler: 'exchanges',
        operation: 'sync-all-exchanges',
        payload: { clearFirst },
      });
      return c.json({ success: true, jobId: job.id, message: 'Enhanced exchange sync job queued' });
    } catch (error) {
      logger.error('Failed to queue enhanced exchange sync job', { error });
      return c.json(
        { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
        500
      );
    }
  });

  // POST /provider-mappings/qm — rebuild QM provider->exchange mappings.
  enhancedSync.post('/provider-mappings/qm', async c => {
    try {
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ success: false, error: 'Queue manager not available' }, 503);
      }
      const exchangesQueue = queueManager.getQueue('exchanges');
      const job = await exchangesQueue.addJob('sync-qm-provider-mappings', {
        handler: 'exchanges',
        operation: 'sync-qm-provider-mappings',
        payload: {},
      });
      return c.json({
        success: true,
        jobId: job.id,
        message: 'QM provider mappings sync job queued',
      });
    } catch (error) {
      logger.error('Failed to queue QM provider mappings sync job', { error });
      return c.json(
        { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
        500
      );
    }
  });

  // POST /provider-mappings/ib — sync IB exchanges (which also produces
  // IB provider mappings on the handler side).
  enhancedSync.post('/provider-mappings/ib', async c => {
    try {
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ success: false, error: 'Queue manager not available' }, 503);
      }
      const exchangesQueue = queueManager.getQueue('exchanges');
      const job = await exchangesQueue.addJob('sync-ib-exchanges', {
        handler: 'exchanges',
        operation: 'sync-ib-exchanges',
        payload: {},
      });
      return c.json({
        success: true,
        jobId: job.id,
        message: 'IB exchanges sync job queued',
      });
    } catch (error) {
      logger.error('Failed to queue IB exchanges sync job', { error });
      return c.json(
        { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
        500
      );
    }
  });

  // GET /status — queue a sync-status job; note this returns the JOB id,
  // not the status rows themselves.
  enhancedSync.get('/status', async c => {
    try {
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ success: false, error: 'Queue manager not available' }, 503);
      }
      const symbolsQueue = queueManager.getQueue('symbols');
      const job = await symbolsQueue.addJob('sync-status', {
        handler: 'symbols',
        operation: 'sync-status',
        payload: {},
      });
      return c.json({ success: true, jobId: job.id, message: 'Sync status job queued' });
    } catch (error) {
      logger.error('Failed to queue sync status job', { error });
      return c.json(
        { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
        500
      );
    }
  });

  // POST /clear/postgresql?type=exchanges|provider_mappings|all — queue a
  // destructive clear of PostgreSQL sync data (defaults to 'all').
  enhancedSync.post('/clear/postgresql', async c => {
    try {
      const dataType = c.req.query('type') as 'exchanges' | 'provider_mappings' | 'all';
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ success: false, error: 'Queue manager not available' }, 503);
      }
      const exchangesQueue = queueManager.getQueue('exchanges');
      const job = await exchangesQueue.addJob('clear-postgresql-data', {
        handler: 'exchanges',
        operation: 'clear-postgresql-data',
        payload: { dataType: dataType || 'all' },
      });
      return c.json({
        success: true,
        jobId: job.id,
        message: 'PostgreSQL data clear job queued',
      });
    } catch (error) {
      logger.error('Failed to queue PostgreSQL clear job', { error });
      return c.json(
        { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
        500
      );
    }
  });
  return enhancedSync;
}
// Legacy export for backward compatibility
// NOTE(review): built with an empty container, so container.queue is
// undefined and every endpoint on this instance responds 503. Prefer
// createEnhancedSyncRoutes(realContainer).
export const enhancedSyncRoutes = createEnhancedSyncRoutes({} as IServiceContainer);

View file

@ -0,0 +1,14 @@
import { Hono } from 'hono';
const health = new Hono();

/**
 * GET / — liveness probe for the data-pipeline service.
 * Always reports healthy; deeper dependency checks live elsewhere.
 */
health.get('/', c =>
  c.json({
    status: 'healthy',
    service: 'data-pipeline',
    timestamp: new Date().toISOString(),
  })
);

export { health as healthRoutes };

View file

@ -0,0 +1,5 @@
// Export all route modules
export { healthRoutes } from './health.routes';
export { syncRoutes } from './sync.routes';
export { enhancedSyncRoutes } from './enhanced-sync.routes';
export { statsRoutes } from './stats.routes';

View file

@ -0,0 +1,63 @@
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
const logger = getLogger('stats-routes');
/**
 * Route factory for sync statistics endpoints.
 *
 * Unlike the sync routes, these endpoints block until the queued job
 * finishes and return its result directly.
 * NOTE(review): in BullMQ, job.waitUntilFinished() requires a QueueEvents
 * argument — confirm the queue wrapper used here supports the
 * zero-argument form, otherwise these endpoints will throw.
 */
export function createStatsRoutes(container: IServiceContainer) {
  const stats = new Hono();
  // Statistics endpoints

  // GET /exchanges — run the get-exchange-stats operation and return its result.
  stats.get('/exchanges', async c => {
    try {
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ error: 'Queue manager not available' }, 503);
      }
      const exchangesQueue = queueManager.getQueue('exchanges');
      const job = await exchangesQueue.addJob('get-exchange-stats', {
        handler: 'exchanges',
        operation: 'get-exchange-stats',
        payload: {},
      });
      // Wait for job to complete and return result
      const result = await job.waitUntilFinished();
      return c.json(result);
    } catch (error) {
      logger.error('Failed to get exchange stats', { error });
      return c.json({ error: error instanceof Error ? error.message : 'Unknown error' }, 500);
    }
  });

  // GET /provider-mappings — run get-provider-mapping-stats and return its result.
  stats.get('/provider-mappings', async c => {
    try {
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ error: 'Queue manager not available' }, 503);
      }
      const exchangesQueue = queueManager.getQueue('exchanges');
      const job = await exchangesQueue.addJob('get-provider-mapping-stats', {
        handler: 'exchanges',
        operation: 'get-provider-mapping-stats',
        payload: {},
      });
      // Wait for job to complete and return result
      const result = await job.waitUntilFinished();
      return c.json(result);
    } catch (error) {
      logger.error('Failed to get provider mapping stats', { error });
      return c.json({ error: error instanceof Error ? error.message : 'Unknown error' }, 500);
    }
  });
  return stats;
}
// Legacy export for backward compatibility
// NOTE(review): built with an empty container, so container.queue is
// undefined and every endpoint on this instance responds 503.
export const statsRoutes = createStatsRoutes({} as IServiceContainer);

View file

@ -0,0 +1,95 @@
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
const logger = getLogger('sync-routes');
/**
 * Route factory for manual sync triggers.
 * Every endpoint enqueues a job and responds immediately with the job id;
 * the actual work happens in the queue handlers. All endpoints answer 503
 * when the queue manager is absent from the container.
 */
export function createSyncRoutes(container: IServiceContainer) {
  const sync = new Hono();
  // Manual sync trigger endpoints

  // POST /symbols — queue the default QM symbols sync.
  sync.post('/symbols', async c => {
    try {
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ success: false, error: 'Queue manager not available' }, 503);
      }
      const symbolsQueue = queueManager.getQueue('symbols');
      const job = await symbolsQueue.addJob('sync-qm-symbols', {
        handler: 'symbols',
        operation: 'sync-qm-symbols',
        payload: {},
      });
      return c.json({ success: true, jobId: job.id, message: 'QM symbols sync job queued' });
    } catch (error) {
      logger.error('Failed to queue symbol sync job', { error });
      return c.json(
        { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
        500
      );
    }
  });

  // POST /exchanges — queue the QM exchanges sync.
  sync.post('/exchanges', async c => {
    try {
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ success: false, error: 'Queue manager not available' }, 503);
      }
      const exchangesQueue = queueManager.getQueue('exchanges');
      const job = await exchangesQueue.addJob('sync-qm-exchanges', {
        handler: 'exchanges',
        operation: 'sync-qm-exchanges',
        payload: {},
      });
      return c.json({ success: true, jobId: job.id, message: 'QM exchanges sync job queued' });
    } catch (error) {
      logger.error('Failed to queue exchange sync job', { error });
      return c.json(
        { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
        500
      );
    }
  });

  // POST /symbols/:provider — queue a provider-specific symbols sync.
  sync.post('/symbols/:provider', async c => {
    try {
      const provider = c.req.param('provider');
      const queueManager = container.queue;
      if (!queueManager) {
        return c.json({ success: false, error: 'Queue manager not available' }, 503);
      }
      const symbolsQueue = queueManager.getQueue('symbols');
      // The symbols handler registers provider-specific operation names
      // ('sync-symbols-qm' | 'sync-symbols-eod' | 'sync-symbols-ib'); the
      // previous generic 'sync-symbols-from-provider' name matched no
      // registered operation, so the queued job could never be dispatched.
      const operation = `sync-symbols-${provider}`;
      const job = await symbolsQueue.addJob(operation, {
        handler: 'symbols',
        operation,
        payload: { provider },
      });
      return c.json({
        success: true,
        jobId: job.id,
        message: `${provider} symbols sync job queued`,
      });
    } catch (error) {
      logger.error('Failed to queue provider symbol sync job', { error });
      return c.json(
        { success: false, error: error instanceof Error ? error.message : 'Unknown error' },
        500
      );
    }
  });
  return sync;
}
// Legacy export for backward compatibility
// NOTE(review): built with an empty object cast to IServiceContainer, so every
// POST on this instance returns 503 "Queue manager not available" because
// container.queue is undefined. Prefer createSyncRoutes(realContainer).
export const syncRoutes = createSyncRoutes({} as IServiceContainer);

View file

@ -0,0 +1,27 @@
/** Arbitrary key/value bag attached to a queued job. */
export interface JobPayload {
  // NOTE(review): the `any` index signature disables type checking on every
  // payload read; `unknown` would be safer but is a compile-breaking change
  // for existing consumers — migrate deliberately.
  [key: string]: any;
}

/** Counters summarizing the outcome of one sync run. */
export interface SyncResult {
  processed: number;
  created: number;
  updated: number;
  skipped: number;
  errors: number;
}

/** Last-sync bookkeeping for one provider/data-type pair. */
export interface SyncStatus {
  provider: string;
  dataType: string;
  // Absent until the first successful sync — TODO confirm with the writer.
  lastSyncAt?: Date;
  lastSyncCount: number;
  // Presumably serialized error detail from the last failed run — verify.
  syncErrors?: string;
}

/** Minimal exchange descriptor used when mapping provider exchanges. */
export interface ExchangeMapping {
  id: string;
  code: string;
  name: string;
  country: string;
  currency: string;
}

View file

@ -0,0 +1,14 @@
{
"extends": "../../tsconfig.app.json",
"references": [
{ "path": "../../libs/core/types" },
{ "path": "../../libs/core/config" },
{ "path": "../../libs/core/logger" },
{ "path": "../../libs/data/cache" },
{ "path": "../../libs/services/queue" },
{ "path": "../../libs/data/mongodb" },
{ "path": "../../libs/data/postgres" },
{ "path": "../../libs/data/questdb" },
{ "path": "../../libs/services/shutdown" }
]
}

View file

@ -0,0 +1,72 @@
// PM2 process definitions for the stock-bot backend services.
// Ports 2001-2003 must stay in sync with scripts/health-check.js.
module.exports = {
  apps: [
    {
      // Data ingestion service (market data collection) on port 2001.
      name: 'stock-ingestion',
      script: './data-ingestion/dist/index.js',
      instances: 1,
      autorestart: true,
      watch: false,
      max_memory_restart: '1G',
      env: {
        NODE_ENV: 'production',
        PORT: 2001
      },
      env_development: {
        NODE_ENV: 'development',
        PORT: 2001
      }
    },
    {
      // Data pipeline service on port 2002.
      name: 'stock-pipeline',
      script: './data-pipeline/dist/index.js',
      instances: 1,
      autorestart: true,
      watch: false,
      max_memory_restart: '1G',
      env: {
        NODE_ENV: 'production',
        PORT: 2002
      },
      env_development: {
        NODE_ENV: 'development',
        PORT: 2002
      }
    },
    {
      // REST API on port 2003 — the only service clustered (2 instances).
      name: 'stock-api',
      script: './web-api/dist/index.js',
      instances: 2,
      autorestart: true,
      watch: false,
      max_memory_restart: '1G',
      exec_mode: 'cluster',
      env: {
        NODE_ENV: 'production',
        PORT: 2003
      },
      env_development: {
        NODE_ENV: 'development',
        PORT: 2003
      }
    }
  ],
  // pm2 deploy targets; note 'post-deploy' cd's into apps/stock, so the
  // repo path above is the monorepo root.
  deploy: {
    production: {
      user: 'deploy',
      host: 'production-server',
      ref: 'origin/master',
      repo: 'git@github.com:username/stock-bot.git',
      path: '/var/www/stock-bot',
      'post-deploy': 'cd apps/stock && npm install && npm run build && pm2 reload ecosystem.config.js --env production'
    },
    staging: {
      user: 'deploy',
      host: 'staging-server',
      ref: 'origin/develop',
      repo: 'git@github.com:username/stock-bot.git',
      path: '/var/www/stock-bot-staging',
      'post-deploy': 'cd apps/stock && npm install && npm run build && pm2 reload ecosystem.config.js --env development'
    }
  }
};

91
apps/stock/package.json Normal file
View file

@ -0,0 +1,91 @@
{
"name": "@stock-bot/stock-app",
"version": "1.0.0",
"private": true,
"description": "Stock trading bot application",
"scripts": {
"dev": "turbo run dev",
"dev:ingestion": "cd data-ingestion && bun run dev",
"dev:pipeline": "cd data-pipeline && bun run dev",
"dev:api": "cd web-api && bun run dev",
"dev:web": "cd web-app && bun run dev",
"dev:backend": "turbo run dev --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-api\"",
"dev:frontend": "turbo run dev --filter=\"@stock-bot/web-app\"",
"build": "turbo run build",
"build:config": "cd config && bun run build",
"build:services": "turbo run build --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-*\"",
"build:ingestion": "cd data-ingestion && bun run build",
"build:pipeline": "cd data-pipeline && bun run build",
"build:api": "cd web-api && bun run build",
"build:web": "cd web-app && bun run build",
"start": "turbo run start --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-api\"",
"start:all": "turbo run start",
"start:ingestion": "cd data-ingestion && bun start",
"start:pipeline": "cd data-pipeline && bun start",
"start:api": "cd web-api && bun start",
"clean": "turbo run clean",
"clean:all": "turbo run clean && rm -rf node_modules",
"clean:ingestion": "cd data-ingestion && rm -rf dist node_modules",
"clean:pipeline": "cd data-pipeline && rm -rf dist node_modules",
"clean:api": "cd web-api && rm -rf dist node_modules",
"clean:web": "cd web-app && rm -rf dist node_modules",
"clean:config": "cd config && rm -rf dist node_modules",
"test": "turbo run test",
"test:all": "turbo run test",
"test:config": "cd config && bun test",
"test:services": "turbo run test --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-*\"",
"test:ingestion": "cd data-ingestion && bun test",
"test:pipeline": "cd data-pipeline && bun test",
"test:api": "cd web-api && bun test",
"lint": "turbo run lint",
"lint:all": "turbo run lint",
"lint:config": "cd config && bun run lint",
"lint:services": "turbo run lint --filter=\"@stock-bot/data-*\" --filter=\"@stock-bot/web-*\"",
"lint:ingestion": "cd data-ingestion && bun run lint",
"lint:pipeline": "cd data-pipeline && bun run lint",
"lint:api": "cd web-api && bun run lint",
"lint:web": "cd web-app && bun run lint",
"install:all": "bun install",
"docker:build": "docker-compose build",
"docker:up": "docker-compose up",
"docker:down": "docker-compose down",
"pm2:start": "pm2 start ecosystem.config.js",
"pm2:stop": "pm2 stop all",
"pm2:restart": "pm2 restart all",
"pm2:logs": "pm2 logs",
"pm2:status": "pm2 status",
"db:migrate": "cd data-ingestion && bun run db:migrate",
"db:seed": "cd data-ingestion && bun run db:seed",
"health:check": "bun scripts/health-check.js",
"monitor": "bun run pm2:logs",
"status": "bun run pm2:status"
},
"devDependencies": {
"pm2": "^5.3.0",
"@types/node": "^20.11.0",
"typescript": "^5.3.3",
"turbo": "^2.5.4"
},
"workspaces": [
"config",
"data-ingestion",
"data-pipeline",
"web-api",
"web-app"
],
"engines": {
"node": ">=18.0.0",
"bun": ">=1.1.0"
},
"packageManager": "bun@1.1.12"
}

View file

@ -0,0 +1,60 @@
#!/usr/bin/env node
const http = require('http');

// Services probed by this script; ports must match ecosystem.config.js.
const services = [
  { name: 'Data Ingestion', port: 2001 },
  { name: 'Data Pipeline', port: 2002 },
  { name: 'Web API', port: 2003 },
];

console.log('🏥 Stock Bot Health Check\n');
async function checkService(service) {
return new Promise((resolve) => {
const options = {
hostname: 'localhost',
port: service.port,
path: '/health',
method: 'GET',
timeout: 5000,
};
const req = http.request(options, (res) => {
if (res.statusCode === 200) {
resolve({ ...service, status: '✅ Healthy', code: res.statusCode });
} else {
resolve({ ...service, status: '⚠️ Unhealthy', code: res.statusCode });
}
});
req.on('error', (err) => {
resolve({ ...service, status: '❌ Offline', error: err.message });
});
req.on('timeout', () => {
req.destroy();
resolve({ ...service, status: '⏱️ Timeout', error: 'Request timed out' });
});
req.end();
});
}
/**
 * Probe every service in parallel, print one status line per service, then
 * exit 0 only when all are healthy (so the script can gate CI/deploy steps).
 */
async function checkAllServices() {
  const results = await Promise.all(services.map(checkService));
  for (const result of results) {
    console.log(`${result.name.padEnd(15)} ${result.status}`);
    if (result.error) {
      console.log(`  ${result.error}`);
    }
  }
  const allHealthy = results.every((r) => r.status === '✅ Healthy');
  console.log('\n' + (allHealthy ? '✅ All services are healthy!' : '⚠️ Some services need attention'));
  process.exit(allHealthy ? 0 : 1);
}
checkAllServices();

18
apps/stock/tsconfig.json Normal file
View file

@ -0,0 +1,18 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"baseUrl": "../..",
"paths": {
"@stock-bot/*": ["libs/*/src"],
"@stock-bot/stock-config": ["apps/stock/config/src"],
"@stock-bot/stock-config/*": ["apps/stock/config/src/*"]
}
},
"references": [
{ "path": "./config" },
{ "path": "./data-ingestion" },
{ "path": "./data-pipeline" },
{ "path": "./web-api" },
{ "path": "./web-app" }
]
}

View file

@ -0,0 +1,26 @@
{
"name": "@stock-bot/web-api",
"version": "1.0.0",
"description": "REST API service for stock bot web application",
"main": "dist/index.js",
"type": "module",
"scripts": {
"dev": "bun --watch src/index.ts",
"build": "bun build src/index.ts --outdir dist --target node --external chromium-bidi --external electron --external playwright --external playwright-core",
"start": "bun dist/index.js",
"test": "bun test",
"clean": "rm -rf dist"
},
"dependencies": {
"@stock-bot/config": "*",
"@stock-bot/stock-config": "*",
"@stock-bot/logger": "*",
"@stock-bot/mongodb": "*",
"@stock-bot/postgres": "*",
"@stock-bot/shutdown": "*",
"hono": "^4.0.0"
},
"devDependencies": {
"typescript": "^5.0.0"
}
}

View file

@ -0,0 +1,34 @@
/**
* Service Container Setup for Web API
* Configures dependency injection for the web API service
*/
import type { IServiceContainer } from '@stock-bot/handlers';
import { getLogger } from '@stock-bot/logger';
import type { AppConfig } from '@stock-bot/config';
const logger = getLogger('web-api-container');
/**
 * Configure the service container for web API workloads.
 *
 * NOTE(review): `poolSizes` is computed and logged but never applied to the
 * container — the incoming container is returned unchanged. Confirm whether
 * pool sizing is supposed to happen here or upstream in the DI factory.
 *
 * @param config    application config; `environment` drives the logged sizes
 * @param container pre-wired DI container, returned as-is
 * @returns the same container instance
 */
export function setupServiceContainer(
  config: AppConfig,
  container: IServiceContainer
): IServiceContainer {
  logger.info('Configuring web API service container...');
  // Web API specific configuration
  // This service mainly reads data, so smaller pool sizes are fine
  const poolSizes = {
    mongodb: config.environment === 'production' ? 20 : 10,
    postgres: config.environment === 'production' ? 30 : 15,
    cache: config.environment === 'production' ? 20 : 10,
  };
  logger.info('Web API pool sizes configured', poolSizes);
  // The container is already configured with connections
  // Just return it with our logging
  return container;
}

View file

@ -0,0 +1,78 @@
/**
* Stock Bot Web API
* Simplified entry point using ServiceApplication framework
*/
import { initializeStockConfig } from '@stock-bot/stock-config';
import {
ServiceApplication,
createServiceContainerFromConfig,
initializeServices as initializeAwilixServices,
} from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
// Local imports
import { createRoutes } from './routes/create-routes';
import { setupServiceContainer } from './container-setup';
// Initialize configuration with service-specific overrides
const config = initializeStockConfig('webApi');
// NOTE(review): this dumps the entire resolved config (potentially including
// credentials/connection strings) to stdout via console.log — confirm this is
// acceptable for production logs, or route it through the logger with redaction.
console.log('Web API Service Configuration:', JSON.stringify(config, null, 2));
// Create service application — HTTP wiring (CORS, lifecycle hooks, metadata)
// lives here; routes and the DI container are supplied to app.start() below.
const app = new ServiceApplication(
  config,
  {
    serviceName: 'web-api',
    enableHandlers: false, // Web API doesn't use handlers
    enableScheduledJobs: false, // Web API doesn't use scheduled jobs
    corsConfig: {
      // Allowed browser origins for local frontends — keep in sync with web-app dev ports.
      origin: ['http://localhost:4200', 'http://localhost:3000', 'http://localhost:3002'],
      allowMethods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],
      allowHeaders: ['Content-Type', 'Authorization'],
      credentials: true,
    },
    serviceMetadata: {
      version: '1.0.0',
      description: 'Stock Bot REST API',
      endpoints: {
        health: '/health',
        exchanges: '/api/exchanges',
      },
    },
  },
  {
    // Custom lifecycle hooks
    onContainerReady: (container) => {
      // Setup service-specific configuration
      const enhancedContainer = setupServiceContainer(config, container);
      return enhancedContainer;
    },
    onStarted: (port) => {
      const logger = getLogger('web-api');
      logger.info('Web API service startup initiated with ServiceApplication framework');
    },
  }
);
// Container factory function
/**
 * Build and initialize the DI container for this service. Only MongoDB,
 * Postgres and the cache are enabled; queue/browser/proxy/QuestDB are
 * switched off because this service only serves read-style API traffic.
 *
 * NOTE(review): `config: any` loses type safety — consider
 * `ReturnType<typeof initializeStockConfig>` if ServiceApplication.start's
 * expected factory signature allows it.
 */
async function createContainer(config: any) {
  const container = createServiceContainerFromConfig(config, {
    enableQuestDB: false, // Web API doesn't need QuestDB
    enableMongoDB: true,
    enablePostgres: true,
    enableCache: true,
    enableQueue: false, // Web API doesn't need queue processing
    enableBrowser: false, // Web API doesn't need browser
    enableProxy: false, // Web API doesn't need proxy
  });
  await initializeAwilixServices(container);
  return container;
}
// Start the service; a startup failure is fatal and exits the process.
app.start(createContainer, createRoutes).catch(error => {
  const logger = getLogger('web-api');
  logger.fatal('Failed to start web API service', { error });
  process.exit(1);
});

View file

@ -0,0 +1,23 @@
/**
* Route factory for web API service
* Creates routes with access to the service container
*/
import { Hono } from 'hono';
import type { IServiceContainer } from '@stock-bot/handlers';
import { createHealthRoutes } from './health.routes';
import { createExchangeRoutes } from './exchange.routes';
export function createRoutes(container: IServiceContainer): Hono {
const app = new Hono();
// Create routes with container
const healthRoutes = createHealthRoutes(container);
const exchangeRoutes = createExchangeRoutes(container);
// Mount routes
app.route('/health', healthRoutes);
app.route('/api/exchanges', exchangeRoutes);
return app;
}

View file

@ -0,0 +1,262 @@
/**
* Exchange management routes - Refactored
*/
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import { createExchangeService } from '../services/exchange.service';
import { createSuccessResponse, handleError } from '../utils/error-handler';
import {
validateCreateExchange,
validateCreateProviderMapping,
validateUpdateExchange,
validateUpdateProviderMapping,
} from '../utils/validation';
const logger = getLogger('exchange-routes');
/**
 * Build the exchange-management router (mounted at /api/exchanges).
 *
 * Route table:
 *   GET    /                                      list visible exchanges + mappings
 *   GET    /:id                                   one exchange with its mappings
 *   POST   /                                      create an exchange
 *   PATCH  /:id                                   update an exchange
 *   GET    /provider-mappings/all                 all provider mappings
 *   GET    /provider-mappings/:provider           mappings for one provider
 *   PATCH  /provider-mappings/:id                 update a mapping
 *   POST   /provider-mappings                     create a mapping
 *   GET    /providers/list                        distinct provider names
 *   GET    /provider-exchanges/unmapped/:provider unmapped provider exchanges
 *   GET    /stats/summary                         aggregate statistics
 *
 * Note: the single-segment param route /:id cannot shadow the two-segment
 * static routes (e.g. /provider-mappings/all) — they differ in segment count.
 *
 * @param container DI container; used only to construct the exchange service.
 */
export function createExchangeRoutes(container: IServiceContainer) {
  const exchangeRoutes = new Hono();
  const exchangeService = createExchangeService(container);
  // Get all exchanges with provider mapping counts and mappings
  exchangeRoutes.get('/', async c => {
    logger.debug('Getting all exchanges');
    try {
      const exchanges = await exchangeService.getAllExchanges();
      logger.info('Successfully retrieved exchanges', { count: exchanges.length });
      return c.json(createSuccessResponse(exchanges, undefined, exchanges.length));
    } catch (error) {
      logger.error('Failed to get exchanges', { error });
      return handleError(c, error, 'to get exchanges');
    }
  });
  // Get exchange by ID with detailed provider mappings
  exchangeRoutes.get('/:id', async c => {
    const exchangeId = c.req.param('id');
    logger.debug('Getting exchange by ID', { exchangeId });
    try {
      const result = await exchangeService.getExchangeById(exchangeId);
      if (!result) {
        logger.warn('Exchange not found', { exchangeId });
        return c.json(createSuccessResponse(null, 'Exchange not found'), 404);
      }
      logger.info('Successfully retrieved exchange details', {
        exchangeId,
        exchangeCode: result.exchange.code,
        mappingCount: result.provider_mappings.length,
      });
      return c.json(createSuccessResponse(result));
    } catch (error) {
      logger.error('Failed to get exchange details', { error, exchangeId });
      return handleError(c, error, 'to get exchange details');
    }
  });
  // Create new exchange
  exchangeRoutes.post('/', async c => {
    logger.debug('Creating new exchange');
    try {
      const body = await c.req.json();
      logger.debug('Received exchange creation request', { requestBody: body });
      const validatedData = validateCreateExchange(body);
      logger.debug('Exchange data validated successfully', { validatedData });
      const exchange = await exchangeService.createExchange(validatedData);
      logger.info('Exchange created successfully', {
        exchangeId: exchange.id,
        code: exchange.code,
        name: exchange.name,
      });
      return c.json(createSuccessResponse(exchange, 'Exchange created successfully'), 201);
    } catch (error) {
      logger.error('Failed to create exchange', { error });
      return handleError(c, error, 'to create exchange');
    }
  });
  // Update exchange (activate/deactivate, rename, etc.)
  exchangeRoutes.patch('/:id', async c => {
    const exchangeId = c.req.param('id');
    logger.debug('Updating exchange', { exchangeId });
    try {
      const body = await c.req.json();
      logger.debug('Received exchange update request', { exchangeId, updates: body });
      const validatedUpdates = validateUpdateExchange(body);
      logger.debug('Exchange update data validated', { exchangeId, validatedUpdates });
      const exchange = await exchangeService.updateExchange(exchangeId, validatedUpdates);
      if (!exchange) {
        logger.warn('Exchange not found for update', { exchangeId });
        return c.json(createSuccessResponse(null, 'Exchange not found'), 404);
      }
      logger.info('Exchange updated successfully', {
        exchangeId,
        code: exchange.code,
        updates: validatedUpdates,
      });
      // Log special actions
      // (hiding an exchange cascades to deleting its provider mappings in the service layer)
      if (validatedUpdates.visible === false) {
        logger.warn('Exchange marked as hidden - provider mappings will be deleted', {
          exchangeId,
          code: exchange.code,
        });
      }
      return c.json(createSuccessResponse(exchange, 'Exchange updated successfully'));
    } catch (error) {
      logger.error('Failed to update exchange', { error, exchangeId });
      return handleError(c, error, 'to update exchange');
    }
  });
  // Get all provider mappings
  exchangeRoutes.get('/provider-mappings/all', async c => {
    logger.debug('Getting all provider mappings');
    try {
      const mappings = await exchangeService.getAllProviderMappings();
      logger.info('Successfully retrieved all provider mappings', { count: mappings.length });
      return c.json(createSuccessResponse(mappings, undefined, mappings.length));
    } catch (error) {
      logger.error('Failed to get provider mappings', { error });
      return handleError(c, error, 'to get provider mappings');
    }
  });
  // Get provider mappings by provider
  exchangeRoutes.get('/provider-mappings/:provider', async c => {
    const provider = c.req.param('provider');
    logger.debug('Getting provider mappings by provider', { provider });
    try {
      const mappings = await exchangeService.getProviderMappingsByProvider(provider);
      logger.info('Successfully retrieved provider mappings', { provider, count: mappings.length });
      return c.json(createSuccessResponse(mappings, undefined, mappings.length));
    } catch (error) {
      logger.error('Failed to get provider mappings', { error, provider });
      return handleError(c, error, 'to get provider mappings');
    }
  });
  // Update provider mapping (activate/deactivate, verify, change confidence)
  exchangeRoutes.patch('/provider-mappings/:id', async c => {
    const mappingId = c.req.param('id');
    logger.debug('Updating provider mapping', { mappingId });
    try {
      const body = await c.req.json();
      logger.debug('Received provider mapping update request', { mappingId, updates: body });
      const validatedUpdates = validateUpdateProviderMapping(body);
      logger.debug('Provider mapping update data validated', { mappingId, validatedUpdates });
      const mapping = await exchangeService.updateProviderMapping(mappingId, validatedUpdates);
      if (!mapping) {
        logger.warn('Provider mapping not found for update', { mappingId });
        return c.json(createSuccessResponse(null, 'Provider mapping not found'), 404);
      }
      logger.info('Provider mapping updated successfully', {
        mappingId,
        provider: mapping.provider,
        providerExchangeCode: mapping.provider_exchange_code,
        updates: validatedUpdates,
      });
      return c.json(createSuccessResponse(mapping, 'Provider mapping updated successfully'));
    } catch (error) {
      logger.error('Failed to update provider mapping', { error, mappingId });
      return handleError(c, error, 'to update provider mapping');
    }
  });
  // Create new provider mapping
  exchangeRoutes.post('/provider-mappings', async c => {
    logger.debug('Creating new provider mapping');
    try {
      const body = await c.req.json();
      logger.debug('Received provider mapping creation request', { requestBody: body });
      const validatedData = validateCreateProviderMapping(body);
      logger.debug('Provider mapping data validated successfully', { validatedData });
      const mapping = await exchangeService.createProviderMapping(validatedData);
      logger.info('Provider mapping created successfully', {
        mappingId: mapping.id,
        provider: mapping.provider,
        providerExchangeCode: mapping.provider_exchange_code,
        masterExchangeId: mapping.master_exchange_id,
      });
      return c.json(createSuccessResponse(mapping, 'Provider mapping created successfully'), 201);
    } catch (error) {
      logger.error('Failed to create provider mapping', { error });
      return handleError(c, error, 'to create provider mapping');
    }
  });
  // Get all available providers
  exchangeRoutes.get('/providers/list', async c => {
    logger.debug('Getting providers list');
    try {
      const providers = await exchangeService.getProviders();
      logger.info('Successfully retrieved providers list', { count: providers.length, providers });
      return c.json(createSuccessResponse(providers));
    } catch (error) {
      logger.error('Failed to get providers list', { error });
      return handleError(c, error, 'to get providers list');
    }
  });
  // Get unmapped provider exchanges by provider
  exchangeRoutes.get('/provider-exchanges/unmapped/:provider', async c => {
    const provider = c.req.param('provider');
    logger.debug('Getting unmapped provider exchanges', { provider });
    try {
      const exchanges = await exchangeService.getUnmappedProviderExchanges(provider);
      logger.info('Successfully retrieved unmapped provider exchanges', {
        provider,
        count: exchanges.length,
      });
      return c.json(createSuccessResponse(exchanges, undefined, exchanges.length));
    } catch (error) {
      logger.error('Failed to get unmapped provider exchanges', { error, provider });
      return handleError(c, error, 'to get unmapped provider exchanges');
    }
  });
  // Get exchange statistics
  exchangeRoutes.get('/stats/summary', async c => {
    logger.debug('Getting exchange statistics');
    try {
      const stats = await exchangeService.getExchangeStats();
      logger.info('Successfully retrieved exchange statistics', { stats });
      return c.json(createSuccessResponse(stats));
    } catch (error) {
      logger.error('Failed to get exchange statistics', { error });
      return handleError(c, error, 'to get exchange statistics');
    }
  });
  return exchangeRoutes;
}

View file

@ -0,0 +1,111 @@
/**
* Health check routes factory
*/
import { Hono } from 'hono';
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
const logger = getLogger('health-routes');
/**
 * Build the health-check router.
 *
 * GET /          — liveness only; always 200 while the process runs.
 * GET /detailed  — readiness; probes MongoDB and PostgreSQL concurrently and
 *                  returns 200 when both are healthy, 503 otherwise.
 *
 * @param container DI container supplying the mongodb/postgres clients.
 */
export function createHealthRoutes(container: IServiceContainer) {
  const healthRoutes = new Hono();

  /** Probe MongoDB via the container client; always resolves, never throws. */
  const checkMongo = async (): Promise<{ status: string; message: string }> => {
    logger.debug('Checking MongoDB connectivity');
    try {
      const mongoClient = container.mongodb;
      if (mongoClient && mongoClient.connected) {
        // A ping round-trip proves the connection is actually responsive,
        // not just nominally open.
        const db = mongoClient.getDatabase();
        await db.admin().ping();
        logger.debug('MongoDB health check passed');
        return { status: 'healthy', message: 'Connected and responsive' };
      }
      logger.warn('MongoDB health check failed - not connected');
      return { status: 'unhealthy', message: 'Not connected' };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('MongoDB health check failed', { error: errorMessage });
      return { status: 'unhealthy', message: errorMessage };
    }
  };

  /** Probe PostgreSQL with a trivial query; always resolves, never throws. */
  const checkPostgres = async (): Promise<{ status: string; message: string }> => {
    logger.debug('Checking PostgreSQL connectivity');
    try {
      const postgresClient = container.postgres;
      if (postgresClient) {
        await postgresClient.query('SELECT 1');
        logger.debug('PostgreSQL health check passed');
        return { status: 'healthy', message: 'Connected and responsive' };
      }
      logger.warn('PostgreSQL health check failed - client not available');
      return { status: 'unhealthy', message: 'PostgreSQL client not available' };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      logger.error('PostgreSQL health check failed', { error: errorMessage });
      return { status: 'unhealthy', message: errorMessage };
    }
  };

  // Basic health check
  healthRoutes.get('/', c => {
    logger.debug('Basic health check requested');
    const response = {
      status: 'healthy',
      service: 'web-api',
      timestamp: new Date().toISOString(),
    };
    logger.info('Basic health check successful', { status: response.status });
    return c.json(response);
  });

  // Detailed health check with database connectivity
  healthRoutes.get('/detailed', async c => {
    logger.debug('Detailed health check requested');
    // The two probes are independent, so run them concurrently instead of
    // serially — halves worst-case latency; both helpers settle, never reject.
    const [mongodb, postgresql] = await Promise.all([checkMongo(), checkPostgres()]);
    const allHealthy = mongodb.status === 'healthy' && postgresql.status === 'healthy';
    const health = {
      status: allHealthy ? 'healthy' : 'unhealthy',
      service: 'web-api',
      timestamp: new Date().toISOString(),
      checks: { mongodb, postgresql },
    };
    if (allHealthy) {
      logger.info('Detailed health check successful - all systems healthy', {
        mongodb: mongodb.status,
        postgresql: postgresql.status,
      });
    } else {
      logger.warn('Detailed health check failed - some systems unhealthy', {
        mongodb: mongodb.status,
        postgresql: postgresql.status,
        overallStatus: health.status,
      });
    }
    return c.json(health, allHealthy ? 200 : 503);
  });

  return healthRoutes;
}
// Export legacy routes for backward compatibility during migration
// NOTE(review): built with an empty object cast to IServiceContainer —
// container.mongodb/postgres are undefined, so '/detailed' on this instance
// always reports both dependencies unhealthy (503). Prefer
// createHealthRoutes(realContainer).
export const healthRoutes = createHealthRoutes({} as IServiceContainer);

View file

@ -0,0 +1,5 @@
/**
* Routes index - exports all route modules
*/
export { createExchangeRoutes } from './exchange.routes';
// Export both the container-aware factory and the legacy singleton so existing
// `healthRoutes` importers keep working while new code migrates to the factory
// (mirrors the createExchangeRoutes export style above).
export { createHealthRoutes, healthRoutes } from './health.routes';

View file

@ -0,0 +1,383 @@
import { getLogger } from '@stock-bot/logger';
import type { IServiceContainer } from '@stock-bot/handlers';
import {
CreateExchangeRequest,
CreateProviderMappingRequest,
Exchange,
ExchangeStats,
ExchangeWithMappings,
ProviderExchange,
ProviderMapping,
UpdateExchangeRequest,
UpdateProviderMappingRequest,
} from '../types/exchange.types';
const logger = getLogger('exchange-service');
export class ExchangeService {
  /** @param container DI container supplying the database clients used below. */
  constructor(private container: IServiceContainer) {}

  // Postgres client backing all exchange/mapping queries in this service.
  private get postgresClient() {
    return this.container.postgres;
  }

  // Mongo client accessor — not used by the methods visible here; confirm
  // whether it is needed before removing.
  private get mongoClient() {
    return this.container.mongodb;
  }
// Exchanges
async getAllExchanges(): Promise<ExchangeWithMappings[]> {
const exchangesQuery = `
SELECT
e.id,
e.code,
e.name,
e.country,
e.currency,
e.active,
e.visible,
e.created_at,
e.updated_at,
COUNT(pem.id) as provider_mapping_count,
COUNT(CASE WHEN pem.active = true THEN 1 END) as active_mapping_count,
COUNT(CASE WHEN pem.verified = true THEN 1 END) as verified_mapping_count,
STRING_AGG(DISTINCT pem.provider, ', ') as providers
FROM exchanges e
LEFT JOIN provider_exchange_mappings pem ON e.id = pem.master_exchange_id
WHERE e.visible = true
GROUP BY e.id, e.code, e.name, e.country, e.currency, e.active, e.visible, e.created_at, e.updated_at
ORDER BY e.code
`;
const exchangesResult = await this.postgresClient.query(exchangesQuery);
// Get all provider mappings
const mappingsQuery = `
SELECT
pem.*,
e.code as master_exchange_code,
e.name as master_exchange_name
FROM provider_exchange_mappings pem
JOIN exchanges e ON pem.master_exchange_id = e.id
WHERE e.visible = true
ORDER BY pem.master_exchange_id, pem.provider, pem.provider_exchange_code
`;
const mappingsResult = await this.postgresClient.query(mappingsQuery);
// Group mappings by exchange ID
const mappingsByExchange = mappingsResult.rows.reduce(
(acc, mapping) => {
const exchangeId = mapping.master_exchange_id;
if (!acc[exchangeId]) {
acc[exchangeId] = [];
}
acc[exchangeId].push(mapping);
return acc;
},
{} as Record<string, ProviderMapping[]>
);
// Attach mappings to exchanges
return exchangesResult.rows.map(exchange => ({
...exchange,
provider_mappings: mappingsByExchange[exchange.id] || [],
}));
}
async getExchangeById(
id: string
): Promise<{ exchange: Exchange; provider_mappings: ProviderMapping[] } | null> {
const exchangeQuery = 'SELECT * FROM exchanges WHERE id = $1 AND visible = true';
const exchangeResult = await this.postgresClient.query(exchangeQuery, [id]);
if (exchangeResult.rows.length === 0) {
return null;
}
const mappingsQuery = `
SELECT
pem.*,
e.code as master_exchange_code,
e.name as master_exchange_name
FROM provider_exchange_mappings pem
JOIN exchanges e ON pem.master_exchange_id = e.id
WHERE pem.master_exchange_id = $1
ORDER BY pem.provider, pem.provider_exchange_code
`;
const mappingsResult = await this.postgresClient.query(mappingsQuery, [id]);
return {
exchange: exchangeResult.rows[0],
provider_mappings: mappingsResult.rows,
};
}
async createExchange(data: CreateExchangeRequest): Promise<Exchange> {
const query = `
INSERT INTO exchanges (code, name, country, currency, active, visible)
VALUES ($1, $2, $3, $4, $5, true)
RETURNING *
`;
const result = await this.postgresClient.query(query, [
data.code,
data.name,
data.country,
data.currency,
data.active,
]);
logger.info('Exchange created', {
exchangeId: result.rows[0].id,
code: data.code,
name: data.name,
});
return result.rows[0];
}
async updateExchange(id: string, updates: UpdateExchangeRequest): Promise<Exchange | null> {
const updateFields = [];
const values = [];
let paramIndex = 1;
Object.entries(updates).forEach(([key, value]) => {
updateFields.push(`${key} = $${paramIndex++}`);
values.push(value);
});
updateFields.push(`updated_at = NOW()`);
values.push(id);
const query = `
UPDATE exchanges
SET ${updateFields.join(', ')}
WHERE id = $${paramIndex}
RETURNING *
`;
const result = await this.postgresClient.query(query, values);
if (result.rows.length === 0) {
return null;
}
// If hiding an exchange, delete its provider mappings
if (updates.visible === false) {
await this.deleteProviderMappingsByExchangeId(id);
}
logger.info('Exchange updated', { exchangeId: id, updates });
return result.rows[0];
}
// Provider Mappings
async getAllProviderMappings(): Promise<ProviderMapping[]> {
const query = `
SELECT
pem.*,
e.code as master_exchange_code,
e.name as master_exchange_name,
e.active as master_exchange_active
FROM provider_exchange_mappings pem
JOIN exchanges e ON pem.master_exchange_id = e.id
WHERE e.visible = true
ORDER BY pem.provider, pem.provider_exchange_code
`;
const result = await this.postgresClient.query(query);
return result.rows;
}
async getProviderMappingsByProvider(provider: string): Promise<ProviderMapping[]> {
const query = `
SELECT
pem.*,
e.code as master_exchange_code,
e.name as master_exchange_name,
e.active as master_exchange_active
FROM provider_exchange_mappings pem
JOIN exchanges e ON pem.master_exchange_id = e.id
WHERE pem.provider = $1 AND e.visible = true
ORDER BY pem.provider_exchange_code
`;
const result = await this.postgresClient.query(query, [provider]);
return result.rows;
}
async createProviderMapping(data: CreateProviderMappingRequest): Promise<ProviderMapping> {
const query = `
INSERT INTO provider_exchange_mappings
(provider, provider_exchange_code, provider_exchange_name, master_exchange_id,
country_code, currency, confidence, active, verified, auto_mapped)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, false)
RETURNING *
`;
const result = await this.postgresClient.query(query, [
data.provider,
data.provider_exchange_code,
data.provider_exchange_name,
data.master_exchange_id,
data.country_code,
data.currency,
data.confidence,
data.active,
data.verified,
]);
logger.info('Provider mapping created', {
provider: data.provider,
provider_exchange_code: data.provider_exchange_code,
master_exchange_id: data.master_exchange_id,
});
return result.rows[0];
}
async updateProviderMapping(
id: string,
updates: UpdateProviderMappingRequest
): Promise<ProviderMapping | null> {
const updateFields = [];
const values = [];
let paramIndex = 1;
Object.entries(updates).forEach(([key, value]) => {
updateFields.push(`${key} = $${paramIndex++}`);
values.push(value);
});
updateFields.push(`updated_at = NOW()`);
updateFields.push(`auto_mapped = false`); // Mark as manually managed
values.push(id);
const query = `
UPDATE provider_exchange_mappings
SET ${updateFields.join(', ')}
WHERE id = $${paramIndex}
RETURNING *
`;
const result = await this.postgresClient.query(query, values);
if (result.rows.length === 0) {
return null;
}
logger.info('Provider mapping updated', { mappingId: id, updates });
return result.rows[0];
}
/**
 * Delete every provider mapping attached to the given master exchange.
 *
 * @param exchangeId Master exchange id whose mappings are removed.
 * @returns Number of rows deleted (0 when none matched).
 */
async deleteProviderMappingsByExchangeId(exchangeId: string): Promise<number> {
  const result = await this.postgresClient.query(
    'DELETE FROM provider_exchange_mappings WHERE master_exchange_id = $1',
    [exchangeId]
  );
  logger.info('Deleted provider mappings for hidden exchange', {
    exchangeId,
    deletedMappings: result.rowCount,
  });
  // rowCount can be null on some commands — normalize to a number.
  return result.rowCount || 0;
}
// Providers and Statistics
/**
 * Distinct provider identifiers present in provider_exchange_mappings,
 * sorted alphabetically.
 */
async getProviders(): Promise<string[]> {
  const sql = `
    SELECT DISTINCT provider
    FROM provider_exchange_mappings
    ORDER BY provider
  `;
  const { rows } = await this.postgresClient.query(sql);
  return rows.map(row => row.provider);
}
/**
 * Aggregate statistics over visible exchanges and their provider mappings,
 * computed in a single round-trip via correlated scalar subqueries.
 *
 * NOTE(review): every column here is a SQL COUNT(), which node-postgres
 * returns as a string (bigint) by default, while ExchangeStats declares
 * number fields — confirm the pg type-parser configuration, or the caller
 * may receive strings at runtime.
 */
async getExchangeStats(): Promise<ExchangeStats> {
  const query = `
    SELECT
      (SELECT COUNT(*) FROM exchanges WHERE visible = true) as total_exchanges,
      (SELECT COUNT(*) FROM exchanges WHERE active = true AND visible = true) as active_exchanges,
      (SELECT COUNT(DISTINCT country) FROM exchanges WHERE visible = true) as countries,
      (SELECT COUNT(DISTINCT currency) FROM exchanges WHERE visible = true) as currencies,
      (SELECT COUNT(*) FROM provider_exchange_mappings pem JOIN exchanges e ON pem.master_exchange_id = e.id WHERE e.visible = true) as total_provider_mappings,
      (SELECT COUNT(*) FROM provider_exchange_mappings pem JOIN exchanges e ON pem.master_exchange_id = e.id WHERE pem.active = true AND e.visible = true) as active_provider_mappings,
      (SELECT COUNT(*) FROM provider_exchange_mappings pem JOIN exchanges e ON pem.master_exchange_id = e.id WHERE pem.verified = true AND e.visible = true) as verified_provider_mappings,
      (SELECT COUNT(DISTINCT provider) FROM provider_exchange_mappings pem JOIN exchanges e ON pem.master_exchange_id = e.id WHERE e.visible = true) as providers
  `;
  const result = await this.postgresClient.query(query);
  return result.rows[0];
}
/**
 * Exchanges reported by a data provider that have no mapping row yet.
 *
 * Reads the provider's exchange list from its MongoDB collection, drops
 * every code already present in provider_exchange_mappings, and normalizes
 * the remainder to the ProviderExchange shape.
 *
 * @param provider One of 'eod', 'ib' or 'qm'.
 * @throws Error for any other provider value.
 */
async getUnmappedProviderExchanges(provider: string): Promise<ProviderExchange[]> {
  // Codes this provider already has mapped — anything in this set is skipped.
  const existingMappingsQuery = `
    SELECT provider_exchange_code
    FROM provider_exchange_mappings
    WHERE provider = $1
  `;
  const existing = await this.postgresClient.query(existingMappingsQuery, [provider]);
  const alreadyMapped = new Set(existing.rows.map(row => row.provider_exchange_code));
  const db = this.mongoClient.getDatabase();

  if (provider === 'eod') {
    // EOD documents use PascalCase field names; only active exchanges are considered.
    const docs = await db.collection('eodExchanges').find({ active: true }).toArray();
    return docs
      .filter(doc => !alreadyMapped.has(doc.Code))
      .map(doc => ({
        provider_exchange_code: doc.Code,
        provider_exchange_name: doc.Name,
        country_code: doc.CountryISO2,
        currency: doc.Currency,
        symbol_count: null,
      }));
  }

  if (provider === 'ib') {
    // Interactive Brokers documents carry no currency information.
    const docs = await db.collection('ibExchanges').find({}).toArray();
    return docs
      .filter(doc => !alreadyMapped.has(doc.exchange_id))
      .map(doc => ({
        provider_exchange_code: doc.exchange_id,
        provider_exchange_name: doc.name,
        country_code: doc.country_code,
        currency: null,
        symbol_count: null,
      }));
  }

  if (provider === 'qm') {
    // Currency is inferred from the country code: CA → CAD, everything else → USD.
    const docs = await db.collection('qmExchanges').find({}).toArray();
    return docs
      .filter(doc => !alreadyMapped.has(doc.exchangeCode))
      .map(doc => ({
        provider_exchange_code: doc.exchangeCode,
        provider_exchange_name: doc.name,
        country_code: doc.countryCode,
        currency: doc.countryCode === 'CA' ? 'CAD' : 'USD',
        symbol_count: null,
      }));
  }

  throw new Error(`Unknown provider: ${provider}`);
}
}
// Export function to create service instance with container
/**
 * Factory: build an ExchangeService wired to the given service container.
 */
export function createExchangeService(container: IServiceContainer): ExchangeService {
  const service = new ExchangeService(container);
  return service;
}

View file

@ -0,0 +1,103 @@
/** A master exchange row from the `exchanges` Postgres table. */
export interface Exchange {
  id: string;
  /** Short exchange code (joined as master_exchange_code in mapping queries). */
  code: string;
  name: string;
  country: string;
  currency: string;
  /** Operationally active flag. */
  active: boolean;
  /** Hidden rows (visible = false) are filtered out of most service queries. */
  visible: boolean;
  // Timestamps arrive from the database driver as strings.
  created_at: string;
  updated_at: string;
}
/**
 * Exchange enriched with aggregate mapping info.
 * Count fields are strings because SQL COUNT() comes back as a string
 * (bigint) from node-postgres by default.
 */
export interface ExchangeWithMappings extends Exchange {
  provider_mapping_count: string;
  active_mapping_count: string;
  verified_mapping_count: string;
  /** Aggregated provider list, or null when the exchange has no mappings. */
  providers: string | null;
  provider_mappings: ProviderMapping[];
}
/** A row of provider_exchange_mappings, optionally joined with its master exchange. */
export interface ProviderMapping {
  id: string;
  /** Data provider identifier (e.g. 'eod', 'ib', 'qm'). */
  provider: string;
  /** Exchange code as the provider reports it. */
  provider_exchange_code: string;
  provider_exchange_name: string;
  /** FK to exchanges.id. */
  master_exchange_id: string;
  country_code: string | null;
  currency: string | null;
  confidence: number;
  active: boolean;
  verified: boolean;
  /** false once a mapping has been manually created or edited. */
  auto_mapped: boolean;
  created_at: string;
  updated_at: string;
  // Present only when the query joins the exchanges table.
  master_exchange_code?: string;
  master_exchange_name?: string;
  master_exchange_active?: boolean;
}
/** Payload for creating a master exchange. */
export interface CreateExchangeRequest {
  code: string;
  name: string;
  country: string;
  currency: string;
  active?: boolean;
}
/** Partial-update payload for a master exchange; omitted fields are unchanged. */
export interface UpdateExchangeRequest {
  name?: string;
  active?: boolean;
  visible?: boolean;
  country?: string;
  currency?: string;
}
/** Payload for creating a provider mapping (auto_mapped is forced to false server-side). */
export interface CreateProviderMappingRequest {
  provider: string;
  provider_exchange_code: string;
  provider_exchange_name?: string;
  master_exchange_id: string;
  country_code?: string;
  currency?: string;
  confidence?: number;
  active?: boolean;
  verified?: boolean;
}
/**
 * Partial-update payload for a provider mapping. These four fields are the
 * only columns the update endpoint may change.
 */
export interface UpdateProviderMappingRequest {
  active?: boolean;
  verified?: boolean;
  confidence?: number;
  master_exchange_id?: string;
}
/** Normalized view of an exchange as reported by a data provider (from MongoDB). */
export interface ProviderExchange {
  provider_exchange_code: string;
  provider_exchange_name: string;
  country_code: string | null;
  currency: string | null;
  symbol_count: number | null;
  // Optional mapping status, populated by some listing endpoints.
  is_mapped?: boolean;
  mapped_to_exchange_id?: string | null;
  mapped_to_exchange_code?: string | null;
}
/**
 * Aggregate counts returned by getExchangeStats().
 * NOTE(review): the underlying SQL uses COUNT(), which node-postgres returns
 * as strings by default — verify the values really are numbers at runtime.
 */
export interface ExchangeStats {
  total_exchanges: number;
  active_exchanges: number;
  countries: number;
  currencies: number;
  total_provider_mappings: number;
  active_provider_mappings: number;
  verified_provider_mappings: number;
  providers: number;
}
/** Standard API envelope: success flag plus optional data, error, message and total. */
export interface ApiResponse<T = unknown> {
  success: boolean;
  data?: T;
  error?: string;
  message?: string;
  total?: number;
}

View file

@ -0,0 +1,64 @@
import { Context } from 'hono';
import { getLogger } from '@stock-bot/logger';
import { ApiResponse } from '../types/exchange.types';
import { ValidationError } from './validation';
// Module-scoped logger tagged with this module's name.
const logger = getLogger('error-handler');
/**
 * Translate an error caught in a route handler into a JSON error Response.
 *
 * Status mapping:
 *  - 400 for ValidationError
 *  - 409 when the message contains 'duplicate key' (Postgres unique violation)
 *  - 404 when the message contains 'not found'
 *  - 500 for everything else
 *
 * @param c         Hono request context used to serialize the response.
 * @param error     The caught value (unknown; narrowed below).
 * @param operation Human-readable description of what failed, for the log.
 */
export function handleError(c: Context, error: unknown, operation: string): Response {
  logger.error(`Failed ${operation}`, { error });

  // Input validation failures.
  if (error instanceof ValidationError) {
    const body: ApiResponse = { success: false, error: error.message };
    return c.json(body, 400);
  }

  const message = error instanceof Error ? error.message : null;

  // Database unique-constraint violations, matched by message text.
  if (message !== null && message.includes('duplicate key')) {
    const body: ApiResponse = {
      success: false,
      error: 'Resource already exists with this unique identifier',
    };
    return c.json(body, 409);
  }

  // Missing-resource errors, matched by message text.
  if (message !== null && message.includes('not found')) {
    const body: ApiResponse = { success: false, error: message };
    return c.json(body, 404);
  }

  // Fallback: generic server error.
  const body: ApiResponse = {
    success: false,
    error: message !== null ? message : 'Unknown error occurred',
  };
  return c.json(body, 500);
}
/**
 * Build the standard success envelope: { success: true, data } plus an
 * optional message and total. Optional keys are added only when supplied,
 * so serialized JSON omits them instead of emitting undefined.
 */
export function createSuccessResponse<T>(
  data: T,
  message?: string,
  total?: number
): ApiResponse<T> {
  const payload: ApiResponse<T> = { success: true, data };
  if (message) {
    payload.message = message;
  }
  if (total !== undefined) {
    payload.total = total;
  }
  return payload;
}

Some files were not shown because too many files have changed in this diff Show more