stock-bot/apps/data-ingestion/src/setup/database-setup.ts
2025-06-21 19:42:20 -04:00

188 lines
No EOL
5.7 KiB
TypeScript

import { getDatabaseConfig, getQueueConfig } from '@stock-bot/config';
import {
ConnectionFactory,
ServiceContainer,
PoolSizeCalculator,
createServiceContainer
} from '@stock-bot/di';
import type { ConnectionFactoryConfig } from '@stock-bot/di';
import { getLogger } from '@stock-bot/logger';
import type { DynamicPoolConfig } from '@stock-bot/mongodb';
// Module-scoped logger tagged with this module's name.
const logger = getLogger('database-setup');
/**
 * Creates a connection factory configured for the data-ingestion service.
 *
 * Pool sizes are static per-connection defaults for this service; MongoDB
 * gets the largest pool because batch imports fan out the most connections.
 *
 * @returns A new, unconnected ConnectionFactory for this service.
 */
export function createConnectionFactory(): ConnectionFactory {
  // NOTE(review): dropped an unused `getDatabaseConfig()` call that was here;
  // confirm the getter had no required side effects.

  // Narrow NODE_ENV with a runtime check instead of a bare `as` assertion so
  // an unexpected value (e.g. 'staging') falls back to 'development' instead
  // of being passed through under a lying type.
  const nodeEnv = process.env.NODE_ENV;
  const environment: 'development' | 'production' | 'test' =
    nodeEnv === 'production' || nodeEnv === 'test' ? nodeEnv : 'development';

  const factoryConfig: ConnectionFactoryConfig = {
    service: 'data-ingestion',
    environment,
    pools: {
      mongodb: {
        poolSize: 50, // Higher for batch imports
      },
      postgres: {
        poolSize: 30,
      },
      cache: {
        poolSize: 20,
      },
      queue: {
        poolSize: 1, // QueueManager is a singleton
      }
    }
  };
  return new ConnectionFactory(factoryConfig);
}
/**
 * Sets up the service container with all dependencies for data-ingestion.
 *
 * Creates the connection factory, builds the base container, then overrides
 * the default 'mongodb', 'postgres', 'cache' and 'queue' registrations with
 * service-specific pool configurations. In production only, dynamic pool
 * sizing is additionally enabled for MongoDB and PostgreSQL.
 *
 * @returns The configured container plus the connection factory that owns
 *          the underlying pools (callers need it e.g. for shutdown).
 */
export async function setupServiceContainer(): Promise<{ container: ServiceContainer, factory: ConnectionFactory }> {
  logger.info('Setting up service container for data-ingestion');
  const connectionFactory = createConnectionFactory();
  const dbConfig = getDatabaseConfig();

  // Create enhanced service container with connection factory.
  const container = createServiceContainer('data-ingestion', connectionFactory, {
    database: dbConfig
  });

  // Override the default database connections with specific configurations.
  registerMongoDB(container, connectionFactory, dbConfig);
  registerPostgres(container, connectionFactory, dbConfig);
  registerCache(container, connectionFactory, dbConfig);
  registerQueue(container, connectionFactory, dbConfig);

  logger.info('Service container setup complete');

  // Optional: enable dynamic pool sizing for production workloads.
  if (process.env.NODE_ENV === 'production') {
    await enableDynamicPoolSizing(container);
  }

  return { container, factory: connectionFactory };
}

// The database config's concrete shape is project-declared; derive it rather
// than restating it here.
type IngestionDbConfig = ReturnType<typeof getDatabaseConfig>;

/** Registers MongoDB with dynamic pool sizing tuned for batch imports. */
function registerMongoDB(
  container: ServiceContainer,
  connectionFactory: ConnectionFactory,
  dbConfig: IngestionDbConfig,
): void {
  container.register({
    name: 'mongodb',
    factory: async () => {
      const poolSize = PoolSizeCalculator.calculate('data-ingestion', 'batch-import');
      const pool = await connectionFactory.createMongoDB({
        name: 'data-ingestion',
        config: {
          uri: dbConfig.mongodb.uri,
          database: dbConfig.mongodb.database,
          host: dbConfig.mongodb.host,
          port: dbConfig.mongodb.port,
          username: dbConfig.mongodb.username,
          password: dbConfig.mongodb.password,
          authSource: dbConfig.mongodb.authSource,
          poolSettings: {
            maxPoolSize: poolSize.max,
            minPoolSize: poolSize.min,
            maxIdleTime: 30000,
          }
        },
        maxConnections: poolSize.max,
        minConnections: poolSize.min,
      });
      return pool.client;
    },
    singleton: true,
  });
}

/** Registers PostgreSQL with settings sized for data ingestion. */
function registerPostgres(
  container: ServiceContainer,
  connectionFactory: ConnectionFactory,
  dbConfig: IngestionDbConfig,
): void {
  container.register({
    name: 'postgres',
    factory: async () => {
      const poolSize = PoolSizeCalculator.calculate('data-ingestion');
      const pool = await connectionFactory.createPostgreSQL({
        name: 'data-ingestion',
        config: {
          host: dbConfig.postgresql.host,
          port: dbConfig.postgresql.port,
          database: dbConfig.postgresql.database,
          username: dbConfig.postgresql.user,
          password: dbConfig.postgresql.password,
          poolSettings: {
            max: poolSize.max,
            min: poolSize.min,
            idleTimeoutMillis: 30000,
          }
        },
        maxConnections: poolSize.max,
        minConnections: poolSize.min,
      });
      return pool.client;
    },
    singleton: true,
  });
}

/** Registers the cache client against the configured Dragonfly instance. */
function registerCache(
  container: ServiceContainer,
  connectionFactory: ConnectionFactory,
  dbConfig: IngestionDbConfig,
): void {
  container.register({
    name: 'cache',
    factory: async () => {
      const pool = await connectionFactory.createCache({
        name: 'data-ingestion',
        config: {
          host: dbConfig.dragonfly.host,
          port: dbConfig.dragonfly.port,
          db: dbConfig.dragonfly.db,
        }
      });
      return pool.client;
    },
    singleton: true,
  });
}

/** Registers the queue client against the configured Dragonfly instance. */
function registerQueue(
  container: ServiceContainer,
  connectionFactory: ConnectionFactory,
  dbConfig: IngestionDbConfig,
): void {
  container.register({
    name: 'queue',
    factory: async () => {
      const pool = await connectionFactory.createQueue({
        name: 'data-ingestion',
        config: {
          host: dbConfig.dragonfly.host,
          port: dbConfig.dragonfly.port,
          // NOTE(review): `||` (not `??`) — a configured db of 0 also falls
          // back to 1 here. Presumably intentional so the queue never shares
          // the cache's db 0; confirm before changing to `??`.
          db: dbConfig.dragonfly.db || 1,
        }
      });
      return pool.client;
    },
    singleton: true,
  });
}
/**
 * Enables dynamic pool sizing for production workloads.
 *
 * Applies one shared scaling policy to the MongoDB and PostgreSQL clients,
 * in that order. A client that does not expose `setDynamicPoolConfig` is
 * skipped silently; any resolution/apply failure is logged as a warning
 * rather than rethrown, so container setup still succeeds.
 */
async function enableDynamicPoolSizing(container: ServiceContainer): Promise<void> {
  const dynamicConfig: DynamicPoolConfig = {
    enabled: true,
    minSize: 5,
    maxSize: 100,
    scaleUpThreshold: 70,
    scaleDownThreshold: 30,
    scaleUpIncrement: 10,
    scaleDownIncrement: 5,
    evaluationInterval: 30000, // Check every 30 seconds
  };

  // Both databases share the same policy — apply it to each in turn.
  const targets: Array<[serviceName: string, label: string]> = [
    ['mongodb', 'MongoDB'],
    ['postgres', 'PostgreSQL'],
  ];

  try {
    for (const [serviceName, label] of targets) {
      const client = await container.resolveAsync(serviceName);
      if (client && typeof client.setDynamicPoolConfig === 'function') {
        client.setDynamicPoolConfig(dynamicConfig);
        logger.info(`Dynamic pool sizing enabled for ${label}`);
      }
    }
  } catch (error) {
    logger.warn('Failed to enable dynamic pool sizing', { error });
  }
}