di-refactor coming along

This commit is contained in:
Boki 2025-06-22 18:14:34 -04:00
parent 7d9044ab29
commit 60ada5f6a3
20 changed files with 582 additions and 335 deletions

View file

@ -4,9 +4,9 @@
*/ */
// Framework imports // Framework imports
import { initializeServiceConfig } from '@stock-bot/config';
import { Hono } from 'hono'; import { Hono } from 'hono';
import { cors } from 'hono/cors'; import { cors } from 'hono/cors';
import { initializeServiceConfig } from '@stock-bot/config';
// Library imports // Library imports
import { import {
createServiceContainer, createServiceContainer,
@ -17,8 +17,8 @@ import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger';
import { Shutdown } from '@stock-bot/shutdown'; import { Shutdown } from '@stock-bot/shutdown';
import { handlerRegistry } from '@stock-bot/types'; import { handlerRegistry } from '@stock-bot/types';
// Local imports // Local imports
import { createRoutes } from './routes/create-routes';
import { initializeAllHandlers } from './handlers'; import { initializeAllHandlers } from './handlers';
import { createRoutes } from './routes/create-routes';
const config = initializeServiceConfig(); const config = initializeServiceConfig();
console.log('Data Service Configuration:', JSON.stringify(config, null, 2)); console.log('Data Service Configuration:', JSON.stringify(config, null, 2));
@ -123,7 +123,11 @@ async function initializeServices() {
let totalScheduledJobs = 0; let totalScheduledJobs = 0;
for (const [handlerName, config] of allHandlers) { for (const [handlerName, config] of allHandlers) {
if (config.scheduledJobs && config.scheduledJobs.length > 0) { if (config.scheduledJobs && config.scheduledJobs.length > 0) {
const queueManager = container!.resolve('queueManager'); const queueManager = container.resolve('queueManager');
if(!queueManager) {
logger.error('Queue manager is not initialized, cannot create scheduled jobs');
continue;
}
const queue = queueManager.getQueue(handlerName); const queue = queueManager.getQueue(handlerName);
for (const scheduledJob of config.scheduledJobs) { for (const scheduledJob of config.scheduledJobs) {

View file

@ -1,27 +1,8 @@
import { MongoDBClient } from '@stock-bot/mongodb'; /**
import { PostgreSQLClient } from '@stock-bot/postgres'; * Client exports for backward compatibility
*
* @deprecated Use ServiceContainer parameter instead
* This file will be removed once all operations are migrated
*/
let postgresClient: PostgreSQLClient | null = null; export { getMongoDBClient, getPostgreSQLClient } from './migration-helper';
let mongodbClient: MongoDBClient | null = null;
export function setPostgreSQLClient(client: PostgreSQLClient): void {
postgresClient = client;
}
export function getPostgreSQLClient(): PostgreSQLClient {
if (!postgresClient) {
throw new Error('PostgreSQL client not initialized. Call setPostgreSQLClient first.');
}
return postgresClient;
}
export function setMongoDBClient(client: MongoDBClient): void {
mongodbClient = client;
}
export function getMongoDBClient(): MongoDBClient {
if (!mongodbClient) {
throw new Error('MongoDB client not initialized. Call setMongoDBClient first.');
}
return mongodbClient;
}

View file

@ -0,0 +1,34 @@
/**
* Service Container Setup for Data Pipeline
* Configures dependency injection for the data pipeline service
*/
import type { ServiceContainer } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
import type { AppConfig } from '@stock-bot/config';
const logger = getLogger('data-pipeline-container');
/**
* Configure the service container for data pipeline workloads
*/
export function setupServiceContainer(
config: AppConfig,
container: ServiceContainer
): ServiceContainer {
logger.info('Configuring data pipeline service container...');
// Data pipeline specific configuration
// This service does more complex queries and transformations
const poolSizes = {
mongodb: config.environment === 'production' ? 40 : 20,
postgres: config.environment === 'production' ? 50 : 25,
cache: config.environment === 'production' ? 30 : 15,
};
logger.info('Data pipeline pool sizes configured', poolSizes);
// The container is already configured with connections
// Just return it with our logging
return container;
}

View file

@ -1,5 +1,6 @@
import { getLogger } from '@stock-bot/logger'; import { getLogger } from '@stock-bot/logger';
import { handlerRegistry, type HandlerConfig, type ScheduledJobConfig } from '@stock-bot/queue'; import { handlerRegistry, createJobHandler, type HandlerConfig, type ScheduledJobConfig } from '@stock-bot/queue';
import type { ServiceContainer } from '@stock-bot/di';
import { exchangeOperations } from './operations'; import { exchangeOperations } from './operations';
const logger = getLogger('exchanges-handler'); const logger = getLogger('exchanges-handler');
@ -51,8 +52,23 @@ const exchangesHandlerConfig: HandlerConfig = {
}, },
}; };
export function initializeExchangesHandler(): void { export function initializeExchangesHandler(container: ServiceContainer) {
logger.info('Registering exchanges handler...'); logger.info('Registering exchanges handler...');
handlerRegistry.registerHandler(HANDLER_NAME, exchangesHandlerConfig);
// Update operations to use container
const containerAwareOperations = Object.entries(exchangeOperations).reduce((acc, [key, operation]) => {
acc[key] = createJobHandler(async (payload: any) => {
return operation(payload, container);
});
return acc;
}, {} as Record<string, any>);
const exchangesHandlerConfigWithContainer: HandlerConfig = {
...exchangesHandlerConfig,
operations: containerAwareOperations,
};
handlerRegistry.register(HANDLER_NAME, exchangesHandlerConfigWithContainer);
logger.info('Exchanges handler registered successfully'); logger.info('Exchanges handler registered successfully');
} }

View file

@ -1,10 +1,13 @@
import { getLogger } from '@stock-bot/logger'; import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '../../../clients'; import type { ServiceContainer } from '@stock-bot/di';
import type { JobPayload } from '../../../types/job-payloads'; import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-clear-postgresql-data'); const logger = getLogger('enhanced-sync-clear-postgresql-data');
export async function clearPostgreSQLData(payload: JobPayload): Promise<{ export async function clearPostgreSQLData(
payload: JobPayload,
container: ServiceContainer
): Promise<{
exchangesCleared: number; exchangesCleared: number;
symbolsCleared: number; symbolsCleared: number;
mappingsCleared: number; mappingsCleared: number;
@ -12,7 +15,7 @@ export async function clearPostgreSQLData(payload: JobPayload): Promise<{
logger.info('Clearing existing PostgreSQL data...'); logger.info('Clearing existing PostgreSQL data...');
try { try {
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
// Start transaction for atomic operations // Start transaction for atomic operations
await postgresClient.query('BEGIN'); await postgresClient.query('BEGIN');
@ -50,7 +53,7 @@ export async function clearPostgreSQLData(payload: JobPayload): Promise<{
return { exchangesCleared, symbolsCleared, mappingsCleared }; return { exchangesCleared, symbolsCleared, mappingsCleared };
} catch (error) { } catch (error) {
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
await postgresClient.query('ROLLBACK'); await postgresClient.query('ROLLBACK');
logger.error('Failed to clear PostgreSQL data', { error }); logger.error('Failed to clear PostgreSQL data', { error });
throw error; throw error;

View file

@ -1,14 +1,17 @@
import { getLogger } from '@stock-bot/logger'; import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '../../../clients'; import type { ServiceContainer } from '@stock-bot/di';
import type { JobPayload, SyncStatus } from '../../../types/job-payloads'; import type { JobPayload, SyncStatus } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-status'); const logger = getLogger('enhanced-sync-status');
export async function getSyncStatus(payload: JobPayload): Promise<SyncStatus[]> { export async function getSyncStatus(
payload: JobPayload,
container: ServiceContainer
): Promise<SyncStatus[]> {
logger.info('Getting comprehensive sync status...'); logger.info('Getting comprehensive sync status...');
try { try {
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
const query = ` const query = `
SELECT provider, data_type as "dataType", last_sync_at as "lastSyncAt", SELECT provider, data_type as "dataType", last_sync_at as "lastSyncAt",
last_sync_count as "lastSyncCount", sync_errors as "syncErrors" last_sync_count as "lastSyncCount", sync_errors as "syncErrors"

View file

@ -1,14 +1,17 @@
import { getLogger } from '@stock-bot/logger'; import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '../../../clients'; import type { ServiceContainer } from '@stock-bot/di';
import type { JobPayload } from '../../../types/job-payloads'; import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-exchange-stats'); const logger = getLogger('enhanced-sync-exchange-stats');
export async function getExchangeStats(payload: JobPayload): Promise<any> { export async function getExchangeStats(
payload: JobPayload,
container: ServiceContainer
): Promise<any> {
logger.info('Getting exchange statistics...'); logger.info('Getting exchange statistics...');
try { try {
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
const query = ` const query = `
SELECT SELECT
COUNT(*) as total_exchanges, COUNT(*) as total_exchanges,

View file

@ -1,14 +1,17 @@
import { getLogger } from '@stock-bot/logger'; import { getLogger } from '@stock-bot/logger';
import { getPostgreSQLClient } from '../../../clients'; import type { ServiceContainer } from '@stock-bot/di';
import type { JobPayload } from '../../../types/job-payloads'; import type { JobPayload } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-provider-mapping-stats'); const logger = getLogger('enhanced-sync-provider-mapping-stats');
export async function getProviderMappingStats(payload: JobPayload): Promise<any> { export async function getProviderMappingStats(
payload: JobPayload,
container: ServiceContainer
): Promise<any> {
logger.info('Getting provider mapping statistics...'); logger.info('Getting provider mapping statistics...');
try { try {
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
const query = ` const query = `
SELECT SELECT
provider, provider,

View file

@ -1,10 +1,10 @@
import { getLogger } from '@stock-bot/logger'; import { getLogger } from '@stock-bot/logger';
import { getMongoDBClient, getPostgreSQLClient } from '../../../clients'; import type { ServiceContainer } from '@stock-bot/di';
import type { JobPayload, SyncResult } from '../../../types/job-payloads'; import type { JobPayload, SyncResult } from '../../../types/job-payloads';
const logger = getLogger('enhanced-sync-all-exchanges'); const logger = getLogger('enhanced-sync-all-exchanges');
export async function syncAllExchanges(payload: JobPayload): Promise<SyncResult> { export async function syncAllExchanges(payload: JobPayload, container: ServiceContainer): Promise<SyncResult> {
const clearFirst = payload.clearFirst || true; const clearFirst = payload.clearFirst || true;
logger.info('Starting comprehensive exchange sync...', { clearFirst }); logger.info('Starting comprehensive exchange sync...', { clearFirst });
@ -17,7 +17,7 @@ export async function syncAllExchanges(payload: JobPayload): Promise<SyncResult>
}; };
try { try {
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
// Clear existing data if requested // Clear existing data if requested
if (clearFirst) { if (clearFirst) {
@ -28,11 +28,11 @@ export async function syncAllExchanges(payload: JobPayload): Promise<SyncResult>
await postgresClient.query('BEGIN'); await postgresClient.query('BEGIN');
// 1. Sync from EOD exchanges (comprehensive global data) // 1. Sync from EOD exchanges (comprehensive global data)
const eodResult = await syncEODExchanges(); const eodResult = await syncEODExchanges(container);
mergeResults(result, eodResult); mergeResults(result, eodResult);
// 2. Sync from IB exchanges (detailed asset information) // 2. Sync from IB exchanges (detailed asset information)
const ibResult = await syncIBExchanges(); const ibResult = await syncIBExchanges(container);
mergeResults(result, ibResult); mergeResults(result, ibResult);
// 3. Update sync status // 3. Update sync status
@ -43,13 +43,14 @@ export async function syncAllExchanges(payload: JobPayload): Promise<SyncResult>
logger.info('Comprehensive exchange sync completed', result); logger.info('Comprehensive exchange sync completed', result);
return result; return result;
} catch (error) { } catch (error) {
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
await postgresClient.query('ROLLBACK'); await postgresClient.query('ROLLBACK');
logger.error('Comprehensive exchange sync failed', { error }); logger.error('Comprehensive exchange sync failed', { error });
throw error; throw error;
} }
} }
async function clearPostgreSQLData(postgresClient: any): Promise<void> { async function clearPostgreSQLData(postgresClient: any): Promise<void> {
logger.info('Clearing existing PostgreSQL data...'); logger.info('Clearing existing PostgreSQL data...');
@ -66,8 +67,8 @@ async function clearPostgreSQLData(postgresClient: any): Promise<void> {
logger.info('PostgreSQL data cleared successfully'); logger.info('PostgreSQL data cleared successfully');
} }
async function syncEODExchanges(): Promise<SyncResult> { async function syncEODExchanges(container: ServiceContainer): Promise<SyncResult> {
const mongoClient = getMongoDBClient(); const mongoClient = container.mongodb;
const exchanges = await mongoClient.find('eodExchanges', { active: true }); const exchanges = await mongoClient.find('eodExchanges', { active: true });
const result: SyncResult = { processed: 0, created: 0, updated: 0, skipped: 0, errors: 0 }; const result: SyncResult = { processed: 0, created: 0, updated: 0, skipped: 0, errors: 0 };
@ -80,7 +81,8 @@ async function syncEODExchanges(): Promise<SyncResult> {
exchange.Name, exchange.Name,
exchange.CountryISO2, exchange.CountryISO2,
exchange.Currency, exchange.Currency,
0.95 // very high confidence for EOD data 0.95, // very high confidence for EOD data
container
); );
result.processed++; result.processed++;
@ -94,8 +96,8 @@ async function syncEODExchanges(): Promise<SyncResult> {
return result; return result;
} }
async function syncIBExchanges(): Promise<SyncResult> { async function syncIBExchanges(container: ServiceContainer): Promise<SyncResult> {
const mongoClient = getMongoDBClient(); const mongoClient = container.mongodb;
const exchanges = await mongoClient.find('ibExchanges', {}); const exchanges = await mongoClient.find('ibExchanges', {});
const result: SyncResult = { processed: 0, created: 0, updated: 0, skipped: 0, errors: 0 }; const result: SyncResult = { processed: 0, created: 0, updated: 0, skipped: 0, errors: 0 };
@ -108,7 +110,8 @@ async function syncIBExchanges(): Promise<SyncResult> {
exchange.name, exchange.name,
exchange.country_code, exchange.country_code,
'USD', // IB doesn't specify currency, default to USD 'USD', // IB doesn't specify currency, default to USD
0.85 // good confidence for IB data 0.85, // good confidence for IB data
container
); );
result.processed++; result.processed++;
@ -128,16 +131,17 @@ async function createProviderExchangeMapping(
providerExchangeName: string, providerExchangeName: string,
countryCode: string | null, countryCode: string | null,
currency: string | null, currency: string | null,
confidence: number confidence: number,
container: ServiceContainer
): Promise<void> { ): Promise<void> {
if (!providerExchangeCode) { if (!providerExchangeCode) {
return; return;
} }
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
// Check if mapping already exists // Check if mapping already exists
const existingMapping = await findProviderExchangeMapping(provider, providerExchangeCode); const existingMapping = await findProviderExchangeMapping(provider, providerExchangeCode, container);
if (existingMapping) { if (existingMapping) {
// Don't override existing mappings to preserve manual work // Don't override existing mappings to preserve manual work
return; return;
@ -148,7 +152,8 @@ async function createProviderExchangeMapping(
providerExchangeCode, providerExchangeCode,
providerExchangeName, providerExchangeName,
countryCode, countryCode,
currency currency,
container
); );
// Create the provider exchange mapping // Create the provider exchange mapping
@ -175,12 +180,13 @@ async function findOrCreateMasterExchange(
providerCode: string, providerCode: string,
providerName: string, providerName: string,
countryCode: string | null, countryCode: string | null,
currency: string | null currency: string | null,
container: ServiceContainer
): Promise<any> { ): Promise<any> {
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
// First, try to find exact match // First, try to find exact match
let masterExchange = await findExchangeByCode(providerCode); let masterExchange = await findExchangeByCode(providerCode, container);
if (masterExchange) { if (masterExchange) {
return masterExchange; return masterExchange;
@ -189,7 +195,7 @@ async function findOrCreateMasterExchange(
// Try to find by similar codes (basic mapping) // Try to find by similar codes (basic mapping)
const basicMapping = getBasicExchangeMapping(providerCode); const basicMapping = getBasicExchangeMapping(providerCode);
if (basicMapping) { if (basicMapping) {
masterExchange = await findExchangeByCode(basicMapping); masterExchange = await findExchangeByCode(basicMapping, container);
if (masterExchange) { if (masterExchange) {
return masterExchange; return masterExchange;
} }
@ -230,17 +236,18 @@ function getBasicExchangeMapping(providerCode: string): string | null {
async function findProviderExchangeMapping( async function findProviderExchangeMapping(
provider: string, provider: string,
providerExchangeCode: string providerExchangeCode: string,
container: ServiceContainer
): Promise<any> { ): Promise<any> {
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
const query = const query =
'SELECT * FROM provider_exchange_mappings WHERE provider = $1 AND provider_exchange_code = $2'; 'SELECT * FROM provider_exchange_mappings WHERE provider = $1 AND provider_exchange_code = $2';
const result = await postgresClient.query(query, [provider, providerExchangeCode]); const result = await postgresClient.query(query, [provider, providerExchangeCode]);
return result.rows[0] || null; return result.rows[0] || null;
} }
async function findExchangeByCode(code: string): Promise<any> { async function findExchangeByCode(code: string, container: ServiceContainer): Promise<any> {
const postgresClient = getPostgreSQLClient(); const postgresClient = container.postgres;
const query = 'SELECT * FROM exchanges WHERE code = $1'; const query = 'SELECT * FROM exchanges WHERE code = $1';
const result = await postgresClient.query(query, [code]); const result = await postgresClient.query(query, [code]);
return result.rows[0] || null; return result.rows[0] || null;

View file

@ -0,0 +1,33 @@
/**
* Handler initialization for data pipeline service
* Registers all handlers with the service container
*/
import type { ServiceContainer } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
import { initializeExchangesHandler } from './exchanges/exchanges.handler';
import { initializeSymbolsHandler } from './symbols/symbols.handler';
const logger = getLogger('pipeline-handler-init');
/**
* Initialize all handlers with the service container
*/
export async function initializeAllHandlers(container: ServiceContainer): Promise<void> {
logger.info('Initializing data pipeline handlers...');
try {
// Initialize exchanges handler with container
initializeExchangesHandler(container);
logger.debug('Exchanges handler initialized');
// Initialize symbols handler with container
initializeSymbolsHandler(container);
logger.debug('Symbols handler initialized');
logger.info('All pipeline handlers initialized successfully');
} catch (error) {
logger.error('Failed to initialize handlers', { error });
throw error;
}
}

View file

@ -1,5 +1,6 @@
import { getLogger } from '@stock-bot/logger'; import { getLogger } from '@stock-bot/logger';
import { handlerRegistry, type HandlerConfig, type ScheduledJobConfig } from '@stock-bot/queue'; import { handlerRegistry, createJobHandler, type HandlerConfig, type ScheduledJobConfig } from '@stock-bot/queue';
import type { ServiceContainer } from '@stock-bot/di';
import { symbolOperations } from './operations'; import { symbolOperations } from './operations';
const logger = getLogger('symbols-handler'); const logger = getLogger('symbols-handler');
@ -34,8 +35,22 @@ const symbolsHandlerConfig: HandlerConfig = {
}, },
}; };
export function initializeSymbolsHandler(): void { export function initializeSymbolsHandler(container: ServiceContainer): void {
logger.info('Registering symbols handler...'); logger.info('Registering symbols handler...');
handlerRegistry.registerHandler(HANDLER_NAME, symbolsHandlerConfig);
// Update operations to use container
const containerAwareOperations = Object.entries(symbolOperations).reduce((acc, [key, operation]) => {
acc[key] = createJobHandler(async (payload: any) => {
return operation(payload, container);
});
return acc;
}, {} as Record<string, any>);
const symbolsHandlerConfigWithContainer: HandlerConfig = {
...symbolsHandlerConfig,
operations: containerAwareOperations,
};
handlerRegistry.register(HANDLER_NAME, symbolsHandlerConfigWithContainer);
logger.info('Symbols handler registered successfully'); logger.info('Symbols handler registered successfully');
} }

View file

@ -1,22 +1,31 @@
/**
* Data Pipeline Service with Dependency Injection
* Uses Awilix container for managing database connections and services
*/
// Framework imports // Framework imports
import { Hono } from 'hono'; import { Hono } from 'hono';
import { cors } from 'hono/cors'; import { cors } from 'hono/cors';
import { initializeServiceConfig } from '@stock-bot/config'; import { initializeServiceConfig } from '@stock-bot/config';
// Library imports // Library imports
import {
createServiceContainer,
initializeServices as initializeAwilixServices,
type ServiceContainer
} from '@stock-bot/di';
import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger'; import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger';
import { MongoDBClient } from '@stock-bot/mongodb';
import { PostgreSQLClient } from '@stock-bot/postgres';
import { QueueManager, type QueueManagerConfig } from '@stock-bot/queue';
import { Shutdown } from '@stock-bot/shutdown'; import { Shutdown } from '@stock-bot/shutdown';
import { setMongoDBClient, setPostgreSQLClient } from './clients'; import { handlerRegistry } from '@stock-bot/types';
// Local imports // Local imports
import { enhancedSyncRoutes, healthRoutes, statsRoutes, syncRoutes } from './routes'; import { createRoutes } from './routes/create-routes';
import { setupServiceContainer } from './container-setup';
import { initializeAllHandlers } from './handlers';
const config = initializeServiceConfig(); const config = initializeServiceConfig();
console.log('Data Sync Service Configuration:', JSON.stringify(config, null, 2)); console.log('Data Pipeline Service Configuration:', JSON.stringify(config, null, 2));
const serviceConfig = config.service; const serviceConfig = config.service;
const databaseConfig = config.database;
const queueConfig = config.queue;
if (config.log) { if (config.log) {
setLoggerConfig({ setLoggerConfig({
@ -31,7 +40,62 @@ if (config.log) {
// Create logger AFTER config is set // Create logger AFTER config is set
const logger = getLogger('data-pipeline'); const logger = getLogger('data-pipeline');
const app = new Hono(); const PORT = serviceConfig.port;
let server: ReturnType<typeof Bun.serve> | null = null;
let container: ServiceContainer | null = null;
let app: Hono | null = null;
// Initialize shutdown manager
const shutdown = Shutdown.getInstance({ timeout: 15000 });
// Initialize services with DI pattern
async function initializeServices() {
logger.info('Initializing data pipeline service with DI...');
try {
// Create Awilix container with proper config structure
logger.debug('Creating Awilix DI container...');
const awilixConfig = {
redis: {
host: config.database.dragonfly.host,
port: config.database.dragonfly.port,
db: config.database.dragonfly.db,
},
mongodb: {
uri: config.database.mongodb.uri,
database: config.database.mongodb.database,
},
postgres: {
host: config.database.postgres.host,
port: config.database.postgres.port,
database: config.database.postgres.database,
user: config.database.postgres.user,
password: config.database.postgres.password,
},
questdb: {
enabled: config.database.questdb.enabled || false,
host: config.database.questdb.host,
httpPort: config.database.questdb.httpPort,
pgPort: config.database.questdb.pgPort,
influxPort: config.database.questdb.ilpPort,
database: config.database.questdb.database,
},
};
container = createServiceContainer(awilixConfig);
await initializeAwilixServices(container);
logger.info('Awilix container created and initialized');
// Setup service-specific configuration
const serviceContainer = setupServiceContainer(config, container.resolve('serviceContainer'));
// Initialize migration helper for backward compatibility
const { setContainerForMigration } = await import('./migration-helper');
setContainerForMigration(serviceContainer);
logger.info('Migration helper initialized for backward compatibility');
// Create app with routes
app = new Hono();
// Add CORS middleware // Add CORS middleware
app.use( app.use(
@ -43,117 +107,24 @@ app.use(
credentials: false, credentials: false,
}) })
); );
const PORT = serviceConfig.port;
let server: ReturnType<typeof Bun.serve> | null = null;
let mongoClient: MongoDBClient | null = null;
let postgresClient: PostgreSQLClient | null = null;
let queueManager: QueueManager | null = null;
// Initialize shutdown manager // Create and mount routes using the service container
const shutdown = Shutdown.getInstance({ timeout: 15000 }); const routes = createRoutes(serviceContainer);
app.route('/', routes);
// Mount routes // Initialize handlers with service container
app.route('/health', healthRoutes); logger.debug('Initializing pipeline handlers with DI pattern...');
app.route('/sync', syncRoutes); await initializeAllHandlers(serviceContainer);
app.route('/sync', enhancedSyncRoutes); logger.info('Pipeline handlers initialized with DI pattern');
app.route('/sync/stats', statsRoutes);
// Initialize services
async function initializeServices() {
logger.info('Initializing data sync service...');
try {
// Initialize MongoDB client
logger.debug('Connecting to MongoDB...');
const mongoConfig = databaseConfig.mongodb;
mongoClient = new MongoDBClient(
{
uri: mongoConfig.uri,
database: mongoConfig.database,
host: mongoConfig.host || 'localhost',
port: mongoConfig.port || 27017,
timeouts: {
connectTimeout: 30000,
socketTimeout: 30000,
serverSelectionTimeout: 5000,
},
},
logger
);
await mongoClient.connect();
setMongoDBClient(mongoClient);
logger.info('MongoDB connected');
// Initialize PostgreSQL client
logger.debug('Connecting to PostgreSQL...');
const pgConfig = databaseConfig.postgres;
postgresClient = new PostgreSQLClient(
{
host: pgConfig.host,
port: pgConfig.port,
database: pgConfig.database,
username: pgConfig.user,
password: pgConfig.password,
poolSettings: {
min: 2,
max: pgConfig.poolSize || 10,
idleTimeoutMillis: pgConfig.idleTimeout || 30000,
},
},
logger
);
await postgresClient.connect();
setPostgreSQLClient(postgresClient);
logger.info('PostgreSQL connected');
// Initialize queue system (with delayed worker start)
logger.debug('Initializing queue system...');
const queueManagerConfig: QueueManagerConfig = {
redis: queueConfig?.redis || {
host: 'localhost',
port: 6379,
db: 1,
},
defaultQueueOptions: {
defaultJobOptions: queueConfig?.defaultJobOptions || {
attempts: 3,
backoff: {
type: 'exponential',
delay: 1000,
},
removeOnComplete: 10,
removeOnFail: 5,
},
workers: 2,
concurrency: 1,
enableMetrics: true,
enableDLQ: true,
},
enableScheduledJobs: true,
delayWorkerStart: true, // Prevent workers from starting until all singletons are ready
};
queueManager = QueueManager.getOrInitialize(queueManagerConfig);
logger.info('Queue system initialized');
// Initialize handlers (register handlers and scheduled jobs)
logger.debug('Initializing sync handlers...');
const { initializeExchangesHandler } = await import('./handlers/exchanges/exchanges.handler');
const { initializeSymbolsHandler } = await import('./handlers/symbols/symbols.handler');
initializeExchangesHandler();
initializeSymbolsHandler();
logger.info('Sync handlers initialized');
// Create scheduled jobs from registered handlers // Create scheduled jobs from registered handlers
logger.debug('Creating scheduled jobs from registered handlers...'); logger.debug('Creating scheduled jobs from registered handlers...');
const { handlerRegistry } = await import('@stock-bot/queue'); const allHandlers = handlerRegistry.getAllHandlersWithSchedule();
const allHandlers = handlerRegistry.getAllHandlers();
let totalScheduledJobs = 0; let totalScheduledJobs = 0;
for (const [handlerName, config] of allHandlers) { for (const [handlerName, config] of allHandlers) {
if (config.scheduledJobs && config.scheduledJobs.length > 0) { if (config.scheduledJobs && config.scheduledJobs.length > 0) {
const queueManager = container!.resolve('queueManager');
const queue = queueManager.getQueue(handlerName); const queue = queueManager.getQueue(handlerName);
for (const scheduledJob of config.scheduledJobs) { for (const scheduledJob of config.scheduledJobs) {
@ -161,7 +132,7 @@ async function initializeServices() {
const jobData = { const jobData = {
handler: handlerName, handler: handlerName,
operation: scheduledJob.operation, operation: scheduledJob.operation,
payload: scheduledJob.payload || {}, payload: scheduledJob.payload,
}; };
// Build job options from scheduled job config // Build job options from scheduled job config
@ -192,14 +163,22 @@ async function initializeServices() {
} }
logger.info('Scheduled jobs created', { totalJobs: totalScheduledJobs }); logger.info('Scheduled jobs created', { totalJobs: totalScheduledJobs });
// Now that all singletons are initialized and jobs are scheduled, start the workers // Start queue workers
logger.debug('Starting queue workers...'); logger.debug('Starting queue workers...');
const queueManager = container.resolve('queueManager');
if (queueManager) {
queueManager.startAllWorkers(); queueManager.startAllWorkers();
logger.info('Queue workers started'); logger.info('Queue workers started');
}
logger.info('All services initialized successfully'); logger.info('All services initialized successfully');
} catch (error) { } catch (error) {
logger.error('Failed to initialize services', { error }); console.error('DETAILED ERROR:', error);
logger.error('Failed to initialize services', {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
details: JSON.stringify(error, null, 2)
});
throw error; throw error;
} }
} }
@ -208,13 +187,17 @@ async function initializeServices() {
async function startServer() { async function startServer() {
await initializeServices(); await initializeServices();
if (!app) {
throw new Error('App not initialized');
}
server = Bun.serve({ server = Bun.serve({
port: PORT, port: PORT,
fetch: app.fetch, fetch: app.fetch,
development: config.environment === 'development', development: config.environment === 'development',
}); });
logger.info(`Data Sync Service started on port ${PORT}`); logger.info(`Data pipeline service started on port ${PORT}`);
} }
// Register shutdown handlers with priorities // Register shutdown handlers with priorities
@ -222,6 +205,7 @@ async function startServer() {
shutdown.onShutdownHigh(async () => { shutdown.onShutdownHigh(async () => {
logger.info('Shutting down queue system...'); logger.info('Shutting down queue system...');
try { try {
const queueManager = container?.resolve('queueManager');
if (queueManager) { if (queueManager) {
await queueManager.shutdown(); await queueManager.shutdown();
} }
@ -244,21 +228,27 @@ shutdown.onShutdownHigh(async () => {
} }
}, 'HTTP Server'); }, 'HTTP Server');
// Priority 2: Database connections (medium priority) // Priority 2: Services and connections (medium priority)
shutdown.onShutdownMedium(async () => { shutdown.onShutdownMedium(async () => {
logger.info('Disconnecting from databases...'); logger.info('Disposing services and connections...');
try { try {
if (mongoClient) { if (container) {
await mongoClient.disconnect(); // Disconnect database clients
const mongoClient = container.resolve('mongoClient');
if (mongoClient?.disconnect) await mongoClient.disconnect();
const postgresClient = container.resolve('postgresClient');
if (postgresClient?.disconnect) await postgresClient.disconnect();
const questdbClient = container.resolve('questdbClient');
if (questdbClient?.disconnect) await questdbClient.disconnect();
logger.info('All services disposed successfully');
} }
if (postgresClient) {
await postgresClient.disconnect();
}
logger.info('Database connections closed');
} catch (error) { } catch (error) {
logger.error('Error closing database connections', { error }); logger.error('Error disposing services', { error });
} }
}, 'Databases'); }, 'Services');
// Priority 3: Logger shutdown (lowest priority - runs last) // Priority 3: Logger shutdown (lowest priority - runs last)
shutdown.onShutdownLow(async () => { shutdown.onShutdownLow(async () => {
@ -273,8 +263,8 @@ shutdown.onShutdownLow(async () => {
// Start the service // Start the service
startServer().catch(error => { startServer().catch(error => {
logger.fatal('Failed to start data sync service', { error }); logger.fatal('Failed to start data pipeline service', { error });
process.exit(1); process.exit(1);
}); });
logger.info('Data sync service startup initiated'); logger.info('Data pipeline service startup initiated with DI pattern');

View file

@ -0,0 +1,37 @@
/**
* Temporary migration helper for data-pipeline service
* Provides backward compatibility while migrating to DI container
*
* TODO: Remove this file once all operations are migrated to use ServiceContainer
*/
import type { ServiceContainer } from '@stock-bot/di';
import type { MongoDBClient } from '@stock-bot/mongodb';
import type { PostgreSQLClient } from '@stock-bot/postgres';
// Module-level singleton holding the DI container captured at startup.
// Shared by the getter helpers below; remains null until
// setContainerForMigration is called during service initialization.
let containerInstance: ServiceContainer | null = null;

/**
 * Stores the DI container so legacy operations can reach database clients
 * without accepting a ServiceContainer parameter.
 *
 * Call exactly once during service startup, after the container has been
 * created and its services initialized.
 *
 * @param container - the fully initialized service container
 */
export function setContainerForMigration(container: ServiceContainer): void {
  containerInstance = container;
}
/**
 * Returns the MongoDB client from the migration container.
 *
 * @returns the container's MongoDB client
 * @throws Error when setContainerForMigration has not been called yet
 */
export function getMongoDBClient(): MongoDBClient {
  if (containerInstance !== null) {
    return containerInstance.mongodb;
  }
  throw new Error('Container not initialized. This is a migration helper - please update the operation to accept ServiceContainer parameter');
}
/**
 * Returns the PostgreSQL client from the migration container.
 *
 * @returns the container's PostgreSQL client
 * @throws Error when setContainerForMigration has not been called yet
 */
export function getPostgreSQLClient(): PostgreSQLClient {
  if (containerInstance !== null) {
    return containerInstance.postgres;
  }
  throw new Error('Container not initialized. This is a migration helper - please update the operation to accept ServiceContainer parameter');
}
/**
 * Returns the QuestDB client from the migration container.
 *
 * NOTE(review): return type is `any` — presumably because the QuestDB client
 * type is not exported from its package; consider exporting and using the
 * concrete type (changing to `unknown` now would break existing callers).
 *
 * @returns the container's QuestDB client
 * @throws Error when setContainerForMigration has not been called yet
 */
export function getQuestDBClient(): any {
  if (containerInstance !== null) {
    return containerInstance.questdb;
  }
  throw new Error('Container not initialized. This is a migration helper - please update the operation to accept ServiceContainer parameter');
}

View file

@ -0,0 +1,26 @@
/**
* Route factory for data pipeline service
* Creates routes with access to the service container
*/
import { Hono } from 'hono';
import type { ServiceContainer } from '@stock-bot/di';
import { healthRoutes, syncRoutes, enhancedSyncRoutes, statsRoutes } from './index';
/**
 * Builds the data-pipeline HTTP app: injects the service container into every
 * request context, then mounts the feature route groups.
 *
 * @param container - DI container made available to handlers via c.get('container')
 * @returns a Hono app with all pipeline routes mounted
 */
export function createRoutes(container: ServiceContainer): Hono {
  const router = new Hono();

  // Expose the container to every downstream handler before routing.
  router.use('*', async (ctx, next) => {
    ctx.set('container', container);
    await next();
  });

  // Mount order matters for '/sync': syncRoutes is consulted before
  // enhancedSyncRoutes for overlapping paths.
  router.route('/health', healthRoutes);
  router.route('/sync', syncRoutes);
  router.route('/sync', enhancedSyncRoutes);
  router.route('/sync/stats', statsRoutes);

  return router;
}

View file

@ -1,27 +1,8 @@
import { MongoDBClient } from '@stock-bot/mongodb'; /**
import { PostgreSQLClient } from '@stock-bot/postgres'; * Client exports for backward compatibility
*
* @deprecated Use ServiceContainer parameter instead
* This file will be removed once all routes and services are migrated
*/
let postgresClient: PostgreSQLClient | null = null; export { getMongoDBClient, getPostgreSQLClient } from './migration-helper';
let mongodbClient: MongoDBClient | null = null;
export function setPostgreSQLClient(client: PostgreSQLClient): void {
postgresClient = client;
}
export function getPostgreSQLClient(): PostgreSQLClient {
if (!postgresClient) {
throw new Error('PostgreSQL client not initialized. Call setPostgreSQLClient first.');
}
return postgresClient;
}
export function setMongoDBClient(client: MongoDBClient): void {
mongodbClient = client;
}
export function getMongoDBClient(): MongoDBClient {
if (!mongodbClient) {
throw new Error('MongoDB client not initialized. Call setMongoDBClient first.');
}
return mongodbClient;
}

View file

@ -0,0 +1,34 @@
/**
* Service Container Setup for Web API
* Configures dependency injection for the web API service
*/
import type { ServiceContainer } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
import type { AppConfig } from '@stock-bot/config';
// Module-scoped logger used by the container setup below.
const logger = getLogger('web-api-container');
/**
 * Applies web-API-specific configuration to the shared service container.
 *
 * NOTE(review): the pool sizes computed here are only logged — they are not
 * applied to the container, which is already configured with connections.
 * Confirm whether they should be wired into the connection setup or removed.
 *
 * @param config - application config; `environment` drives pool sizing
 * @param container - already-connected DI container
 * @returns the same container, unchanged
 */
export function setupServiceContainer(
  config: AppConfig,
  container: ServiceContainer
): ServiceContainer {
  logger.info('Configuring web API service container...');

  const isProd = config.environment === 'production';
  // This service mostly reads data, so modest pool sizes suffice.
  const poolSizes = {
    mongodb: isProd ? 20 : 10,
    postgres: isProd ? 30 : 15,
    cache: isProd ? 20 : 10,
  };
  logger.info('Web API pool sizes configured', poolSizes);

  return container;
}

View file

@ -1,60 +1,111 @@
/** /**
* Stock Bot Web API - REST API service for web application * Stock Bot Web API with Dependency Injection
* REST API service using Awilix container for managing connections
*/ */
// Framework imports
import { Hono } from 'hono'; import { Hono } from 'hono';
import { cors } from 'hono/cors'; import { cors } from 'hono/cors';
import { initializeServiceConfig } from '@stock-bot/config'; import { initializeServiceConfig } from '@stock-bot/config';
// Library imports
import {
createServiceContainer,
initializeServices as initializeAwilixServices,
type ServiceContainer
} from '@stock-bot/di';
import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger'; import { getLogger, setLoggerConfig, shutdownLoggers } from '@stock-bot/logger';
import { MongoDBClient } from '@stock-bot/mongodb';
import { PostgreSQLClient } from '@stock-bot/postgres';
import { Shutdown } from '@stock-bot/shutdown'; import { Shutdown } from '@stock-bot/shutdown';
import { exchangeRoutes } from './routes/exchange.routes';
import { healthRoutes } from './routes/health.routes';
// Import routes
import { setMongoDBClient, setPostgreSQLClient } from './clients';
// Initialize configuration with automatic monorepo config inheritance // Local imports
const config = await initializeServiceConfig(); import { createRoutes } from './routes/create-routes';
import { setupServiceContainer } from './container-setup';
const config = initializeServiceConfig();
console.log('Web API Service Configuration:', JSON.stringify(config, null, 2));
const serviceConfig = config.service; const serviceConfig = config.service;
const databaseConfig = config.database;
// Initialize logger with config if (config.log) {
const loggingConfig = config.logging;
if (loggingConfig) {
setLoggerConfig({ setLoggerConfig({
logLevel: loggingConfig.level, logLevel: config.log.level,
logConsole: true, logConsole: true,
logFile: false, logFile: false,
environment: config.environment, environment: config.environment,
hideObject: config.log.hideObject,
}); });
} }
const app = new Hono(); // Create logger AFTER config is set
const logger = getLogger('web-api');
const PORT = serviceConfig.port;
let server: ReturnType<typeof Bun.serve> | null = null;
let container: ServiceContainer | null = null;
let app: Hono | null = null;
// Initialize shutdown manager
const shutdown = Shutdown.getInstance({ timeout: 15000 });
// Initialize services with DI pattern
async function initializeServices() {
logger.info('Initializing web API service with DI...');
try {
// Create Awilix container with proper config structure
logger.debug('Creating Awilix DI container...');
const awilixConfig = {
redis: {
host: config.database.dragonfly.host,
port: config.database.dragonfly.port,
db: config.database.dragonfly.db,
},
mongodb: {
uri: config.database.mongodb.uri,
database: config.database.mongodb.database,
},
postgres: {
host: config.database.postgres.host,
port: config.database.postgres.port,
database: config.database.postgres.database,
user: config.database.postgres.user,
password: config.database.postgres.password,
},
questdb: {
enabled: false, // Web API doesn't need QuestDB
host: config.database.questdb.host,
httpPort: config.database.questdb.httpPort,
pgPort: config.database.questdb.pgPort,
influxPort: config.database.questdb.ilpPort,
database: config.database.questdb.database,
},
};
container = createServiceContainer(awilixConfig);
await initializeAwilixServices(container);
logger.info('Awilix container created and initialized');
// Setup service-specific configuration
const serviceContainer = setupServiceContainer(config, container.resolve('serviceContainer'));
// Initialize migration helper for backward compatibility
const { setContainerForMigration } = await import('./migration-helper');
setContainerForMigration(serviceContainer);
logger.info('Migration helper initialized for backward compatibility');
// Create app with routes
app = new Hono();
// Add CORS middleware // Add CORS middleware
app.use( app.use(
'*', '*',
cors({ cors({
origin: ['http://localhost:4200', 'http://localhost:3000', 'http://localhost:3002'], // React dev server ports origin: ['http://localhost:4200', 'http://localhost:3000', 'http://localhost:3002'],
allowMethods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'], allowMethods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'],
allowHeaders: ['Content-Type', 'Authorization'], allowHeaders: ['Content-Type', 'Authorization'],
credentials: true, credentials: true,
}) })
); );
const logger = getLogger('web-api');
const PORT = serviceConfig.port;
let server: ReturnType<typeof Bun.serve> | null = null;
let postgresClient: PostgreSQLClient | null = null;
let mongoClient: MongoDBClient | null = null;
// Initialize shutdown manager
const shutdown = Shutdown.getInstance({ timeout: 15000 });
// Add routes
app.route('/health', healthRoutes);
app.route('/api/exchanges', exchangeRoutes);
// Basic API info endpoint // Basic API info endpoint
app.get('/', c => { app.get('/', c => {
return c.json({ return c.json({
@ -69,57 +120,18 @@ app.get('/', c => {
}); });
}); });
// Initialize services // Create and mount routes using the service container
async function initializeServices() { const routes = createRoutes(serviceContainer);
logger.info('Initializing web API service...'); app.route('/', routes);
try {
// Initialize MongoDB client
logger.debug('Connecting to MongoDB...');
const mongoConfig = databaseConfig.mongodb;
mongoClient = new MongoDBClient(
{
uri: mongoConfig.uri,
database: mongoConfig.database,
host: mongoConfig.host,
port: mongoConfig.port,
timeouts: {
connectTimeout: 30000,
socketTimeout: 30000,
serverSelectionTimeout: 5000,
},
},
logger
);
await mongoClient.connect();
setMongoDBClient(mongoClient);
logger.info('MongoDB connected');
// Initialize PostgreSQL client
logger.debug('Connecting to PostgreSQL...');
const pgConfig = databaseConfig.postgres;
postgresClient = new PostgreSQLClient(
{
host: pgConfig.host,
port: pgConfig.port,
database: pgConfig.database,
username: pgConfig.user,
password: pgConfig.password,
poolSettings: {
min: 2,
max: pgConfig.poolSize || 10,
idleTimeoutMillis: pgConfig.idleTimeout || 30000,
},
},
logger
);
await postgresClient.connect();
setPostgreSQLClient(postgresClient);
logger.info('PostgreSQL connected');
logger.info('All services initialized successfully'); logger.info('All services initialized successfully');
} catch (error) { } catch (error) {
logger.error('Failed to initialize services', { error }); console.error('DETAILED ERROR:', error);
logger.error('Failed to initialize services', {
error: error instanceof Error ? error.message : String(error),
stack: error instanceof Error ? error.stack : undefined,
details: JSON.stringify(error, null, 2)
});
throw error; throw error;
} }
} }
@ -128,17 +140,22 @@ async function initializeServices() {
async function startServer() { async function startServer() {
await initializeServices(); await initializeServices();
if (!app) {
throw new Error('App not initialized');
}
server = Bun.serve({ server = Bun.serve({
port: PORT, port: PORT,
fetch: app.fetch, fetch: app.fetch,
development: config.environment === 'development', development: config.environment === 'development',
}); });
logger.info(`Stock Bot Web API started on port ${PORT}`); logger.info(`Web API service started on port ${PORT}`);
} }
// Register shutdown handlers // Register shutdown handlers with priorities
shutdown.onShutdown(async () => { // Priority 1: HTTP Server (high priority)
shutdown.onShutdownHigh(async () => {
if (server) { if (server) {
logger.info('Stopping HTTP server...'); logger.info('Stopping HTTP server...');
try { try {
@ -148,36 +165,42 @@ shutdown.onShutdown(async () => {
logger.error('Error stopping HTTP server', { error }); logger.error('Error stopping HTTP server', { error });
} }
} }
}); }, 'HTTP Server');
shutdown.onShutdown(async () => { // Priority 2: Services and connections (medium priority)
logger.info('Disconnecting from databases...'); shutdown.onShutdownMedium(async () => {
logger.info('Disposing services and connections...');
try { try {
if (mongoClient) { if (container) {
await mongoClient.disconnect(); // Disconnect database clients
const mongoClient = container.resolve('mongoClient');
if (mongoClient?.disconnect) await mongoClient.disconnect();
const postgresClient = container.resolve('postgresClient');
if (postgresClient?.disconnect) await postgresClient.disconnect();
logger.info('All services disposed successfully');
} }
if (postgresClient) {
await postgresClient.disconnect();
}
logger.info('Database connections closed');
} catch (error) { } catch (error) {
logger.error('Error closing database connections', { error }); logger.error('Error disposing services', { error });
} }
}); }, 'Services');
shutdown.onShutdown(async () => { // Priority 3: Logger shutdown (lowest priority - runs last)
shutdown.onShutdownLow(async () => {
try { try {
logger.info('Shutting down loggers...');
await shutdownLoggers(); await shutdownLoggers();
// process.stdout.write('Web API loggers shut down\n'); // Don't log after shutdown
} catch (error) { } catch {
process.stderr.write(`Error shutting down loggers: ${error}\n`); // Silently ignore logger shutdown errors
} }
}); }, 'Loggers');
// Start the service // Start the service
startServer().catch(error => { startServer().catch(error => {
logger.error('Failed to start web API service', { error }); logger.fatal('Failed to start web API service', { error });
process.exit(1); process.exit(1);
}); });
logger.info('Web API service startup initiated'); logger.info('Web API service startup initiated with DI pattern');

View file

@ -0,0 +1,30 @@
/**
* Temporary migration helper for web-api service
* Provides backward compatibility while migrating to DI container
*
* TODO: Remove this file once all routes and services are migrated to use ServiceContainer
*/
import type { ServiceContainer } from '@stock-bot/di';
import type { MongoDBClient } from '@stock-bot/mongodb';
import type { PostgreSQLClient } from '@stock-bot/postgres';
// Module-level singleton holding the DI container captured at startup.
// Shared by the getter helpers below; remains null until
// setContainerForMigration is called during service initialization.
let containerInstance: ServiceContainer | null = null;

/**
 * Stores the DI container so legacy routes and services can reach database
 * clients without accepting a ServiceContainer parameter.
 *
 * Call exactly once during service startup, after the container has been
 * created and its services initialized.
 *
 * @param container - the fully initialized service container
 */
export function setContainerForMigration(container: ServiceContainer): void {
  containerInstance = container;
}
/**
 * Returns the MongoDB client from the migration container.
 *
 * @returns the container's MongoDB client
 * @throws Error when setContainerForMigration has not been called yet
 */
export function getMongoDBClient(): MongoDBClient {
  if (containerInstance !== null) {
    return containerInstance.mongodb;
  }
  throw new Error('Container not initialized. This is a migration helper - please update the service to accept ServiceContainer parameter');
}
/**
 * Returns the PostgreSQL client from the migration container.
 *
 * @returns the container's PostgreSQL client
 * @throws Error when setContainerForMigration has not been called yet
 */
export function getPostgreSQLClient(): PostgreSQLClient {
  if (containerInstance !== null) {
    return containerInstance.postgres;
  }
  throw new Error('Container not initialized. This is a migration helper - please update the service to accept ServiceContainer parameter');
}

View file

@ -0,0 +1,24 @@
/**
* Route factory for web API service
* Creates routes with access to the service container
*/
import { Hono } from 'hono';
import type { ServiceContainer } from '@stock-bot/di';
import { healthRoutes, exchangeRoutes } from './index';
/**
 * Builds the web-API HTTP app: injects the service container into every
 * request context, then mounts the health and exchange route groups.
 *
 * @param container - DI container made available to handlers via c.get('container')
 * @returns a Hono app with all web-API routes mounted
 */
export function createRoutes(container: ServiceContainer): Hono {
  const router = new Hono();

  // Expose the container to every downstream handler before routing.
  router.use('*', async (ctx, next) => {
    ctx.set('container', container);
    await next();
  });

  router.route('/health', healthRoutes);
  router.route('/api/exchanges', exchangeRoutes);

  return router;
}