Added a new DI (dependency injection) container with connection pooling, and rebuilt the database/cache services

This commit is contained in:
Boki 2025-06-21 14:54:51 -04:00
parent be6afef832
commit 09d907a10c
26 changed files with 4844 additions and 205 deletions

View file

@ -0,0 +1,18 @@
{
"name": "@stock-bot/connection-factory",
"version": "1.0.0",
"main": "./src/index.ts",
"types": "./src/index.ts",
"dependencies": {
"@stock-bot/config": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/mongodb-client": "workspace:*",
"@stock-bot/postgres-client": "workspace:*",
"@stock-bot/cache": "workspace:*",
"mongodb": "^6.3.0",
"pg": "^8.11.3"
},
"devDependencies": {
"@types/pg": "^8.10.7"
}
}

View file

@ -0,0 +1,234 @@
import { getLogger, type Logger } from '@stock-bot/logger';
import { MongoDBClient, createMongoDBClient, type ConnectionEvents } from '@stock-bot/mongodb-client';
import { PostgreSQLClient, createPostgreSQLClient } from '@stock-bot/postgres-client';
import { createCache, type CacheProvider } from '@stock-bot/cache';
import type {
ConnectionFactory as IConnectionFactory,
ConnectionPool,
ConnectionFactoryConfig,
MongoDBPoolConfig,
PostgreSQLPoolConfig,
CachePoolConfig,
PoolMetrics,
} from './types';
/**
 * Creates, caches, and disposes named connection pools (MongoDB,
 * PostgreSQL, cache) for a single service. Pools are keyed by
 * "<type>:<name>" and reused when the same name is requested again.
 */
export class ConnectionFactory implements IConnectionFactory {
  private readonly logger: Logger;
  /** Registry of live pools, keyed by "<type>:<name>". */
  private readonly pools: Map<string, ConnectionPool<any>> = new Map();
  private readonly config: ConnectionFactoryConfig;

  constructor(config: ConnectionFactoryConfig) {
    this.config = config;
    this.logger = getLogger(`connection-factory:${config.service}`);
  }

  /**
   * Create (or reuse) a named MongoDB connection pool.
   *
   * NOTE(review): two concurrent calls with the same name can both pass the
   * `pools.has` check and create duplicate clients; create pools during
   * startup, or cache the in-flight promise if this becomes an issue.
   *
   * @throws whatever the underlying client throws while connecting.
   */
  async createMongoDB(poolConfig: MongoDBPoolConfig): Promise<ConnectionPool<MongoDBClient>> {
    const key = `mongodb:${poolConfig.name}`;
    if (this.pools.has(key)) {
      this.logger.debug('Reusing existing MongoDB pool', { name: poolConfig.name });
      return this.pools.get(key)!;
    }
    this.logger.info('Creating MongoDB connection pool', {
      name: poolConfig.name,
      poolSize: poolConfig.poolSize,
    });
    try {
      // Forward connection lifecycle events into this factory's logger.
      const events: ConnectionEvents = {
        onConnect: () => {
          this.logger.debug('MongoDB connected', { pool: poolConfig.name });
        },
        onDisconnect: () => {
          this.logger.debug('MongoDB disconnected', { pool: poolConfig.name });
        },
        onError: (error) => {
          this.logger.error('MongoDB error', { pool: poolConfig.name, error });
        },
      };
      const client = createMongoDBClient({
        ...poolConfig.config,
        poolSettings: {
          maxPoolSize: poolConfig.maxConnections || poolConfig.poolSize || 10,
          minPoolSize: poolConfig.minConnections || 2,
          maxIdleTime: 30000,
        }
      }, events);
      await client.connect();
      // Warm up the pool so the first real queries don't pay connection cost.
      if (poolConfig.minConnections) {
        await client.warmupPool();
      }
      const pool: ConnectionPool<MongoDBClient> = {
        name: poolConfig.name,
        client,
        // Live getter: re-read metrics from the client on every access so
        // listPools() does not report a stale snapshot taken at creation time.
        get metrics() {
          return client.getPoolMetrics();
        },
        health: async () => {
          try {
            await client.getDatabase().admin().ping();
            return true;
          } catch {
            return false;
          }
        },
        dispose: async () => {
          await client.disconnect();
          this.pools.delete(key);
        },
      };
      this.pools.set(key, pool);
      return pool;
    } catch (error) {
      this.logger.error('Failed to create MongoDB pool', { name: poolConfig.name, error });
      throw error;
    }
  }

  /**
   * Create (or reuse) a named PostgreSQL connection pool.
   * Same caveats as createMongoDB() regarding concurrent creation.
   */
  async createPostgreSQL(poolConfig: PostgreSQLPoolConfig): Promise<ConnectionPool<PostgreSQLClient>> {
    const key = `postgres:${poolConfig.name}`;
    if (this.pools.has(key)) {
      this.logger.debug('Reusing existing PostgreSQL pool', { name: poolConfig.name });
      return this.pools.get(key)!;
    }
    this.logger.info('Creating PostgreSQL connection pool', {
      name: poolConfig.name,
      poolSize: poolConfig.poolSize,
    });
    try {
      // Forward connection lifecycle events into this factory's logger.
      const events: ConnectionEvents = {
        onConnect: () => {
          this.logger.debug('PostgreSQL connected', { pool: poolConfig.name });
        },
        onDisconnect: () => {
          this.logger.debug('PostgreSQL disconnected', { pool: poolConfig.name });
        },
        onError: (error) => {
          this.logger.error('PostgreSQL error', { pool: poolConfig.name, error });
        },
      };
      const client = createPostgreSQLClient({
        ...poolConfig.config,
        poolSettings: {
          max: poolConfig.maxConnections || poolConfig.poolSize || 10,
          min: poolConfig.minConnections || 2,
          idleTimeoutMillis: poolConfig.idleTimeoutMillis || 30000,
        },
      }, undefined, events);
      await client.connect();
      // Warm up the pool so the first real queries don't pay connection cost.
      if (poolConfig.minConnections) {
        await client.warmupPool();
      }
      const pool: ConnectionPool<PostgreSQLClient> = {
        name: poolConfig.name,
        client,
        // Live getter — see createMongoDB().
        get metrics() {
          return client.getPoolMetrics();
        },
        health: async () => client.connected,
        dispose: async () => {
          await client.disconnect();
          this.pools.delete(key);
        },
      };
      this.pools.set(key, pool);
      return pool;
    } catch (error) {
      this.logger.error('Failed to create PostgreSQL pool', { name: poolConfig.name, error });
      throw error;
    }
  }

  /**
   * Create (or reuse) a named cache pool. Synchronous — the cache provider
   * is constructed immediately and connects on its own.
   */
  createCache(poolConfig: CachePoolConfig): ConnectionPool<CacheProvider> {
    const key = `cache:${poolConfig.name}`;
    if (this.pools.has(key)) {
      this.logger.debug('Reusing existing cache pool', { name: poolConfig.name });
      return this.pools.get(key)!;
    }
    this.logger.info('Creating cache connection pool', {
      name: poolConfig.name,
    });
    try {
      const cache = createCache({
        ...poolConfig.config,
        // Namespace keys per service and pool to avoid cross-pool collisions.
        keyPrefix: `${this.config.service}:${poolConfig.name}:`,
        shared: false, // Each pool gets its own connection
      });
      const pool: ConnectionPool<CacheProvider> = {
        name: poolConfig.name,
        client: cache,
        // The cache provider exposes no pool instrumentation; report a
        // zeroed snapshot so listPools() stays uniform.
        metrics: this.createInitialMetrics(),
        health: async () => cache.health(),
        dispose: async () => {
          // Cache disposal handled internally
          this.pools.delete(key);
        },
      };
      this.pools.set(key, pool);
      return pool;
    } catch (error) {
      this.logger.error('Failed to create cache pool', { name: poolConfig.name, error });
      throw error;
    }
  }

  /** Look up a live pool by type and name, or undefined if absent. */
  getPool(type: 'mongodb' | 'postgres' | 'cache', name: string): ConnectionPool<any> | undefined {
    const key = `${type}:${name}`;
    return this.pools.get(key);
  }

  /** Snapshot every live pool with its current metrics. */
  listPools(): Array<{ type: string; name: string; metrics: PoolMetrics }> {
    const result: Array<{ type: string; name: string; metrics: PoolMetrics }> = [];
    for (const [key, pool] of this.pools.entries()) {
      // Split only on the first ':' — pool names may themselves contain ':'.
      const [type, ...nameParts] = key.split(':');
      result.push({
        type: type || 'unknown',
        name: nameParts.join(':'),
        metrics: pool.metrics,
      });
    }
    return result;
  }

  /**
   * Dispose every pool. Uses Promise.allSettled so one failing dispose
   * cannot prevent the rest from closing (Promise.all would reject on the
   * first failure and leave the remaining pools open and the registry
   * uncleared); failures are logged instead of thrown.
   */
  async disposeAll(): Promise<void> {
    this.logger.info('Disposing all connection pools', { count: this.pools.size });
    const results = await Promise.allSettled(
      [...this.pools.values()].map((pool) => pool.dispose())
    );
    for (const result of results) {
      if (result.status === 'rejected') {
        this.logger.error('Failed to dispose connection pool', { error: result.reason });
      }
    }
    this.pools.clear();
  }

  /** Zeroed metrics for providers that expose no pool instrumentation. */
  private createInitialMetrics(): PoolMetrics {
    return {
      created: new Date(),
      totalConnections: 0,
      activeConnections: 0,
      idleConnections: 0,
      waitingRequests: 0,
      errors: 0,
    };
  }
}

View file

@ -0,0 +1,23 @@
// Public entry point for @stock-bot/connection-factory.
// Runtime exports
export { ConnectionFactory } from './connection-factory';
export { ServiceContainer, createServiceContainer } from './service-container';
export { PoolSizeCalculator } from './pool-size-calculator';
// Pool and factory type re-exports (type-only; erased at compile time)
export type {
ConnectionPoolConfig,
MongoDBPoolConfig,
PostgreSQLPoolConfig,
CachePoolConfig,
ConnectionFactoryConfig,
ConnectionPool,
PoolMetrics,
// Interface aliased to avoid clashing with the ConnectionFactory class export above.
ConnectionFactory as IConnectionFactory,
} from './types';
// DI container types
export type {
ServiceRegistration,
ServiceResolver,
} from './service-container';
// Pool sizing types
export type {
PoolSizeRecommendation,
} from './pool-size-calculator';

View file

@ -0,0 +1,80 @@
import type { ConnectionPoolConfig } from './types';
/** Recommended connection-pool bounds: minimum, maximum, and idle count. */
export interface PoolSizeRecommendation {
min: number;
max: number;
idle: number;
}
/**
 * Static helper that recommends connection-pool sizes, either from a
 * built-in per-service/per-handler profile table or from queueing math.
 */
export class PoolSizeCalculator {
  /** Known per-service and per-handler pool-size profiles. */
  private static readonly DEFAULT_SIZES: Record<string, PoolSizeRecommendation> = {
    // Service-level defaults
    'data-ingestion': { min: 5, max: 50, idle: 10 },
    'data-pipeline': { min: 3, max: 30, idle: 5 },
    'processing-service': { min: 2, max: 20, idle: 3 },
    'web-api': { min: 2, max: 10, idle: 2 },
    'portfolio-service': { min: 2, max: 15, idle: 3 },
    'strategy-service': { min: 3, max: 25, idle: 5 },
    'execution-service': { min: 2, max: 10, idle: 2 },
    // Handler-level defaults
    'batch-import': { min: 10, max: 100, idle: 20 },
    'real-time': { min: 2, max: 10, idle: 3 },
    'analytics': { min: 5, max: 30, idle: 10 },
    'reporting': { min: 3, max: 20, idle: 5 },
  };

  /**
   * Resolve a pool-size recommendation.
   *
   * Resolution order:
   *  1. explicit min/max from customConfig (idle derived as (min+max)/4),
   *  2. handler-specific profile, 3. service-level profile,
   *  4. conservative generic default.
   */
  static calculate(
    serviceName: string,
    handlerName?: string,
    customConfig?: Partial<ConnectionPoolConfig>
  ): PoolSizeRecommendation {
    // Explicit configuration wins. Use nullish checks (not truthiness) so a
    // legitimate `minConnections: 0` is honoured rather than silently ignored.
    if (customConfig?.minConnections != null && customConfig?.maxConnections != null) {
      return {
        min: customConfig.minConnections,
        max: customConfig.maxConnections,
        idle: Math.floor((customConfig.minConnections + customConfig.maxConnections) / 4),
      };
    }
    // Handler profile takes precedence over the service profile.
    const key = handlerName || serviceName;
    const recommendation = this.DEFAULT_SIZES[key] || this.DEFAULT_SIZES[serviceName];
    if (recommendation) {
      // Copy so callers cannot mutate the shared defaults table.
      return { ...recommendation };
    }
    // Fall back to generic defaults.
    return {
      min: 2,
      max: 10,
      idle: 3,
    };
  }

  /**
   * Estimate an optimal pool size via Little's Law (L = λ·W) plus a 20%
   * burst buffer, bounded below by a latency-based estimate and a hard
   * minimum of 2 connections.
   *
   * @param expectedConcurrency requests per second (λ)
   * @param averageQueryTimeMs  mean query duration in milliseconds (W)
   * @param targetLatencyMs     latency budget used for the lower bound
   * @returns recommended number of connections (always >= 2)
   */
  static getOptimalPoolSize(
    expectedConcurrency: number,
    averageQueryTimeMs: number,
    targetLatencyMs: number
  ): number {
    const requestsPerSecond = expectedConcurrency;
    const averageTimeInSystem = averageQueryTimeMs / 1000; // W, in seconds
    const minConnections = Math.ceil(requestsPerSecond * averageTimeInSystem);
    // Add buffer for burst traffic (20% overhead)
    const recommendedSize = Math.ceil(minConnections * 1.2);
    // Ensure we meet target latency
    const latencyBasedSize = Math.ceil(expectedConcurrency * (averageQueryTimeMs / targetLatencyMs));
    return Math.max(recommendedSize, latencyBasedSize, 2); // Minimum 2 connections
  }
}

View file

@ -0,0 +1,147 @@
import { getLogger, type Logger } from '@stock-bot/logger';
import type { ConnectionFactory } from './connection-factory';
/** Describes how to construct (and optionally tear down) a named service. */
export interface ServiceRegistration<T = any> {
name: string;
// Factory may be sync or async; async factories require resolveAsync().
factory: () => T | Promise<T>;
// When true, the instance is cached for the container's lifetime.
singleton?: boolean;
// Optional teardown hook invoked by ServiceContainer.dispose().
dispose?: (instance: T) => Promise<void>;
}
/** Minimal resolution contract implemented by ServiceContainer. */
export interface ServiceResolver {
resolve<T>(name: string, options?: any): T;
resolveAsync<T>(name: string, options?: any): Promise<T>;
}
/**
 * Small hierarchical DI container. Registrations are name-keyed; child
 * scopes share the parent's registrations but own their scoped instances.
 */
export class ServiceContainer implements ServiceResolver {
  private readonly logger: Logger;
  private readonly registrations = new Map<string, ServiceRegistration>();
  // Singleton instances — owned (and disposed) by the root container.
  private readonly instances = new Map<string, any>();
  // Per-scope instances — disposed when this container is disposed.
  private readonly scopedInstances = new Map<string, any>();
  private readonly parent?: ServiceContainer;

  constructor(name: string, parent?: ServiceContainer) {
    this.logger = getLogger(`service-container:${name}`);
    this.parent = parent;
  }

  /** Register a service factory under a unique name. */
  register<T>(registration: ServiceRegistration<T>): void {
    this.registrations.set(registration.name, registration);
    this.logger.debug('Service registered', { name: registration.name, singleton: registration.singleton });
  }

  /**
   * Synchronously resolve a service.
   *
   * BUG FIX: the previous implementation delegated to resolveAsync(), which
   * — being an async method — ALWAYS returns a Promise, so resolve() threw
   * for every service, including synchronous ones. Resolution is now done
   * synchronously and only factories that actually return a Promise are
   * rejected.
   *
   * @throws if the service is not registered, or its factory is async.
   */
  resolve<T>(name: string, _options?: any): T {
    // Check scoped instances first, then singletons.
    if (this.scopedInstances.has(name)) {
      return this.scopedInstances.get(name);
    }
    if (this.instances.has(name)) {
      return this.instances.get(name);
    }
    const registration = this.getRegistration(name);
    if (!registration) {
      throw new Error(`Service ${name} not registered`);
    }
    const instance = registration.factory();
    if (instance instanceof Promise) {
      throw new Error(`Service ${name} is async. Use resolveAsync() instead.`);
    }
    this.store(registration, name, instance);
    return instance as T;
  }

  /**
   * Resolve a service, awaiting async factories.
   * Scoped instances are checked before singletons.
   */
  async resolveAsync<T>(name: string, _options?: any): Promise<T> {
    if (this.scopedInstances.has(name)) {
      return this.scopedInstances.get(name);
    }
    if (this.instances.has(name)) {
      return this.instances.get(name);
    }
    // Get registration from this container or parent
    const registration = this.getRegistration(name);
    if (!registration) {
      throw new Error(`Service ${name} not registered`);
    }
    const instance = await Promise.resolve(registration.factory());
    this.store(registration, name, instance);
    return instance as T;
  }

  /** Create a child scope sharing registrations but owning its own instances. */
  createScope(): ServiceContainer {
    return new ServiceContainer('scoped', this);
  }

  /**
   * Dispose scoped instances; the root container (no parent) additionally
   * disposes singletons.
   */
  async dispose(): Promise<void> {
    for (const [name, instance] of this.scopedInstances.entries()) {
      const registration = this.getRegistration(name);
      if (registration?.dispose) {
        await registration.dispose(instance);
      }
    }
    this.scopedInstances.clear();
    // Only dispose singletons if this is the root container
    if (!this.parent) {
      for (const [name, instance] of this.instances.entries()) {
        const registration = this.registrations.get(name);
        if (registration?.dispose) {
          await registration.dispose(instance);
        }
      }
      this.instances.clear();
    }
  }

  /** Cache an instance according to its registration's singleton flag. */
  private store(registration: ServiceRegistration, name: string, instance: any): void {
    if (registration.singleton) {
      this.instances.set(name, instance);
    } else {
      this.scopedInstances.set(name, instance);
    }
  }

  /** Look up a registration locally, falling back to the parent chain. */
  private getRegistration(name: string): ServiceRegistration | undefined {
    return this.registrations.get(name) || this.parent?.getRegistration(name);
  }
}
// Helper to create pre-configured containers for services
export function createServiceContainer(
serviceName: string,
connectionFactory: ConnectionFactory
): ServiceContainer {
const container = new ServiceContainer(serviceName);
// Register connection factories
container.register({
name: 'mongodb',
factory: async () => {
const pool = await connectionFactory.createMongoDB({
name: 'default',
config: {} as any, // Config injected by factory
});
return pool.client;
},
singleton: true,
});
container.register({
name: 'postgres',
factory: async () => {
const pool = await connectionFactory.createPostgreSQL({
name: 'default',
config: {} as any, // Config injected by factory
});
return pool.client;
},
singleton: true,
});
container.register({
name: 'cache',
factory: () => {
const pool = connectionFactory.createCache({
name: 'default',
config: {} as any, // Config injected by factory
});
return pool.client;
},
singleton: true,
});
return container;
}

View file

@ -0,0 +1,61 @@
import type { MongoDBClientConfig } from '@stock-bot/mongodb-client';
import type { PostgreSQLClientConfig } from '@stock-bot/postgres-client';
import type { CacheOptions } from '@stock-bot/cache';
/**
 * Base settings shared by all pool kinds. Size fields are optional; the
 * factory applies defaults when they are absent.
 */
export interface ConnectionPoolConfig {
name: string;
poolSize?: number;
minConnections?: number;
maxConnections?: number;
idleTimeoutMillis?: number;
connectionTimeoutMillis?: number;
enableMetrics?: boolean;
}
/** MongoDB pool settings plus the underlying client configuration. */
export interface MongoDBPoolConfig extends ConnectionPoolConfig {
config: MongoDBClientConfig;
}
/** PostgreSQL pool settings plus the underlying client configuration. */
export interface PostgreSQLPoolConfig extends ConnectionPoolConfig {
config: PostgreSQLClientConfig;
}
/** Cache pool settings plus the cache provider options. */
export interface CachePoolConfig extends ConnectionPoolConfig {
config: CacheOptions;
}
/** Factory-wide configuration: owning service, environment, and per-type pool defaults. */
export interface ConnectionFactoryConfig {
service: string;
environment: 'development' | 'production' | 'test';
pools?: {
mongodb?: Partial<MongoDBPoolConfig>;
postgres?: Partial<PostgreSQLPoolConfig>;
cache?: Partial<CachePoolConfig>;
};
}
/** A live, named pool wrapping a connected client of type T. */
export interface ConnectionPool<T> {
name: string;
client: T;
metrics: PoolMetrics;
// Resolves true when the underlying connection is usable.
health(): Promise<boolean>;
// Closes the connection and removes the pool from its factory.
dispose(): Promise<void>;
}
/** Point-in-time connection-pool statistics. */
export interface PoolMetrics {
created: Date;
totalConnections: number;
activeConnections: number;
idleConnections: number;
waitingRequests: number;
errors: number;
}
/** Contract implemented by the ConnectionFactory class. */
export interface ConnectionFactory {
createMongoDB(config: MongoDBPoolConfig): Promise<ConnectionPool<any>>;
createPostgreSQL(config: PostgreSQLPoolConfig): Promise<ConnectionPool<any>>;
createCache(config: CachePoolConfig): ConnectionPool<any>;
getPool(type: 'mongodb' | 'postgres' | 'cache', name: string): ConnectionPool<any> | undefined;
listPools(): Array<{ type: string; name: string; metrics: PoolMetrics }>;
disposeAll(): Promise<void>;
}

View file

@ -0,0 +1,16 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"rootDir": "./src",
"outDir": "./dist",
"composite": true
},
"include": ["src/**/*"],
"references": [
{ "path": "../config" },
{ "path": "../logger" },
{ "path": "../mongodb-client" },
{ "path": "../postgres-client" },
{ "path": "../cache" }
]
}

View file

@ -1,6 +1,6 @@
import { getLogger } from '@stock-bot/logger';
import { Collection, Db, MongoClient, OptionalUnlessRequiredId } from 'mongodb';
import type { DocumentBase, MongoDBClientConfig } from './types';
import type { DocumentBase, MongoDBClientConfig, PoolMetrics, ConnectionEvents, DynamicPoolConfig } from './types';
/**
* MongoDB Client for Stock Bot Data Service
@ -15,10 +15,23 @@ export class MongoDBClient {
private defaultDatabase: string;
private readonly logger = getLogger('mongodb-client');
private isConnected = false;
private readonly metrics: PoolMetrics;
private readonly events?: ConnectionEvents;
private dynamicPoolConfig?: DynamicPoolConfig;
private poolMonitorInterval?: Timer;
constructor(config: MongoDBClientConfig) {
constructor(config: MongoDBClientConfig, events?: ConnectionEvents) {
this.config = config;
this.defaultDatabase = config.database || 'stock';
this.events = events;
this.metrics = {
totalConnections: 0,
activeConnections: 0,
idleConnections: 0,
waitingRequests: 0,
errors: 0,
created: new Date(),
};
}
/**
@ -48,8 +61,38 @@ export class MongoDBClient {
this.db = this.client.db(this.defaultDatabase);
this.isConnected = true;
this.logger.info('Successfully connected to MongoDB');
// Update metrics
this.metrics.totalConnections = this.config.poolSettings?.maxPoolSize || 10;
this.metrics.idleConnections = this.metrics.totalConnections;
// Fire connection event
if (this.events?.onConnect) {
await Promise.resolve(this.events.onConnect());
}
// Fire pool created event
if (this.events?.onPoolCreated) {
await Promise.resolve(this.events.onPoolCreated());
}
this.logger.info('Successfully connected to MongoDB', {
database: this.defaultDatabase,
poolSize: this.metrics.totalConnections,
});
// Start pool monitoring if dynamic sizing is enabled
if (this.dynamicPoolConfig?.enabled) {
this.startPoolMonitoring();
}
} catch (error) {
this.metrics.errors++;
this.metrics.lastError = error instanceof Error ? error.message : 'Unknown error';
// Fire error event
if (this.events?.onError) {
await Promise.resolve(this.events.onError(error as Error));
}
this.logger.error('MongoDB connection failed:', error);
if (this.client) {
await this.client.close();
@ -68,10 +111,22 @@ export class MongoDBClient {
}
try {
// Stop pool monitoring
if (this.poolMonitorInterval) {
clearInterval(this.poolMonitorInterval);
this.poolMonitorInterval = undefined;
}
await this.client.close();
this.isConnected = false;
this.client = null;
this.db = null;
// Fire disconnect event
if (this.events?.onDisconnect) {
await Promise.resolve(this.events.onDisconnect());
}
this.logger.info('Disconnected from MongoDB');
} catch (error) {
this.logger.error('Error disconnecting from MongoDB:', error);
@ -350,4 +405,116 @@ export class MongoDBClient {
return `mongodb://${auth}${host}:${port}/${database}${authParam}`;
}
/**
* Get current pool metrics
*/
getPoolMetrics(): PoolMetrics {
// Update last used timestamp
this.metrics.lastUsed = new Date();
// Note: MongoDB driver doesn't expose detailed pool metrics
// These are estimates based on configuration
return { ...this.metrics };
}
/**
* Set dynamic pool configuration
*/
setDynamicPoolConfig(config: DynamicPoolConfig): void {
this.dynamicPoolConfig = config;
if (config.enabled && this.isConnected && !this.poolMonitorInterval) {
this.startPoolMonitoring();
} else if (!config.enabled && this.poolMonitorInterval) {
clearInterval(this.poolMonitorInterval);
this.poolMonitorInterval = undefined;
}
}
/**
* Start monitoring pool and adjust size dynamically
*/
private startPoolMonitoring(): void {
if (!this.dynamicPoolConfig || this.poolMonitorInterval) {
return;
}
this.poolMonitorInterval = setInterval(() => {
this.evaluatePoolSize();
}, this.dynamicPoolConfig.evaluationInterval);
}
/**
 * Evaluate pool utilization and request a resize when thresholds are
 * crossed. No-op unless dynamic pool config is set and a client exists.
 */
private async evaluatePoolSize(): Promise<void> {
  if (!this.dynamicPoolConfig || !this.client) {
    return;
  }
  const { minSize, maxSize, scaleUpThreshold, scaleDownThreshold } = this.dynamicPoolConfig;
  const currentSize = this.metrics.totalConnections;
  // Guard against division by zero (the PostgreSQL client already does this):
  // a zero-sized pool would otherwise yield NaN and silently skip both branches.
  const utilization = currentSize > 0 ? ((this.metrics.activeConnections / currentSize) * 100) : 0;
  this.logger.debug('Pool utilization', {
    utilization: `${utilization.toFixed(1)}%`,
    active: this.metrics.activeConnections,
    total: currentSize,
  });
  // Scale up if utilization is high
  if (utilization > scaleUpThreshold && currentSize < maxSize) {
    const newSize = Math.min(currentSize + this.dynamicPoolConfig.scaleUpIncrement, maxSize);
    await this.resizePool(newSize);
    this.logger.info('Scaling up connection pool', { from: currentSize, to: newSize, utilization });
  }
  // Scale down if utilization is low
  else if (utilization < scaleDownThreshold && currentSize > minSize) {
    const newSize = Math.max(currentSize - this.dynamicPoolConfig.scaleDownIncrement, minSize);
    await this.resizePool(newSize);
    this.logger.info('Scaling down connection pool', { from: currentSize, to: newSize, utilization });
  }
}
/**
* Resize the connection pool
* Note: MongoDB driver doesn't support dynamic resizing, this would require reconnection
*/
private async resizePool(newSize: number): Promise<void> {
// MongoDB doesn't support dynamic pool resizing
// This is a placeholder for future implementation
this.logger.warn('Dynamic pool resizing not yet implemented for MongoDB', { requestedSize: newSize });
// Update metrics to reflect desired state
this.metrics.totalConnections = newSize;
}
/**
 * Enable pool warmup on connect
 *
 * Issues minPoolSize parallel pings so connections are opened before the
 * first real query. Failures are logged but never thrown.
 *
 * @throws Error if the client is not connected.
 */
async warmupPool(): Promise<void> {
  if (!this.client || !this.isConnected) {
    throw new Error('Client not connected');
  }
  // Target the configured minimum pool size (default 1).
  const minSize = this.config.poolSettings?.minPoolSize || 1;
  const promises: Promise<void>[] = [];
  // Create minimum connections by running parallel pings
  // NOTE(review): the driver may serve concurrent pings from fewer sockets
  // than minSize, so this is best-effort — confirm against the MongoDB
  // driver's pooling behavior.
  for (let i = 0; i < minSize; i++) {
    promises.push(
      this.client.db(this.defaultDatabase).admin().ping()
        .then(() => {
          this.logger.debug(`Warmed up connection ${i + 1}/${minSize}`);
        })
        .catch(error => {
          this.logger.warn(`Failed to warm up connection ${i + 1}`, { error });
        })
    );
  }
  // allSettled: warmup failures are logged above, never fatal.
  await Promise.allSettled(promises);
  this.logger.info('Connection pool warmup complete', { connections: minSize });
}
}

View file

@ -1,20 +1,21 @@
import { MongoDBClient } from './client';
import type { MongoDBClientConfig } from './types';
import type { MongoDBClientConfig, ConnectionEvents } from './types';
/**
* Factory function to create a MongoDB client instance
*/
export function createMongoDBClient(config: MongoDBClientConfig): MongoDBClient {
return new MongoDBClient(config);
export function createMongoDBClient(config: MongoDBClientConfig, events?: ConnectionEvents): MongoDBClient {
return new MongoDBClient(config, events);
}
/**
* Create and connect a MongoDB client
*/
export async function createAndConnectMongoDBClient(
config: MongoDBClientConfig
config: MongoDBClientConfig,
events?: ConnectionEvents
): Promise<MongoDBClient> {
const client = createMongoDBClient(config);
const client = createMongoDBClient(config, events);
await client.connect();
return client;
}

View file

@ -20,6 +20,9 @@ export type {
RawDocument,
SecFiling,
SentimentData,
PoolMetrics,
ConnectionEvents,
DynamicPoolConfig,
} from './types';
// Factory functions
@ -28,10 +31,4 @@ export {
createAndConnectMongoDBClient,
} from './factory';
// Singleton instance
export {
getMongoDBClient,
connectMongoDB,
getDatabase,
disconnectMongoDB,
} from './singleton';
// Singleton pattern removed - use factory functions instead

View file

@ -1,82 +0,0 @@
import { MongoDBClient } from './client';
import type { MongoDBClientConfig } from './types';
import type { Db } from 'mongodb';
/**
* Singleton MongoDB client instance
* Provides global access to a single MongoDB connection
*/
let instance: MongoDBClient | null = null;
let initPromise: Promise<MongoDBClient> | null = null;
/**
* Initialize the singleton MongoDB client
*/
export async function connectMongoDB(config?: MongoDBClientConfig): Promise<MongoDBClient> {
if (instance) {
return instance;
}
if (initPromise) {
return initPromise;
}
if (!config) {
throw new Error('MongoDB client not initialized. Call connectMongoDB(config) first.');
}
initPromise = (async () => {
const client = new MongoDBClient(config);
await client.connect();
instance = client;
return client;
})();
try {
return await initPromise;
} catch (error) {
// Reset promise on error so next call can retry
initPromise = null;
throw error;
}
}
/**
* Get the singleton MongoDB client instance
* @throws Error if not initialized
*/
export function getMongoDBClient(): MongoDBClient {
if (!instance) {
throw new Error('MongoDB client not initialized. Call connectMongoDB(config) first.');
}
return instance;
}
/**
* Get the MongoDB database instance
* @throws Error if not initialized
*/
export function getDatabase(): Db {
if (!instance) {
throw new Error('MongoDB client not initialized. Call connectMongoDB(config) first.');
}
return instance.getDatabase();
}
/**
* Check if the MongoDB client is initialized
*/
export function isInitialized(): boolean {
return instance !== null && instance.connected;
}
/**
* Disconnect and reset the singleton instance
*/
export async function disconnectMongoDB(): Promise<void> {
if (instance) {
await instance.disconnect();
instance = null;
}
initPromise = null;
}

View file

@ -43,6 +43,36 @@ export interface MongoDBConnectionOptions {
healthCheckInterval?: number;
}
/** Point-in-time pool statistics tracked by the client. */
export interface PoolMetrics {
totalConnections: number;
activeConnections: number;
idleConnections: number;
waitingRequests: number;
errors: number;
// Message of the most recent connection error, if any.
lastError?: string;
avgResponseTime?: number;
created: Date;
// Updated each time getPoolMetrics() is called.
lastUsed?: Date;
}
/** Optional lifecycle callbacks fired by the client; each may be async. */
export interface ConnectionEvents {
onConnect?: () => void | Promise<void>;
onDisconnect?: () => void | Promise<void>;
onError?: (error: Error) => void | Promise<void>;
onPoolCreated?: () => void | Promise<void>;
}
/** Settings for threshold-based dynamic pool resizing. */
export interface DynamicPoolConfig {
enabled: boolean;
minSize: number;
maxSize: number;
scaleUpThreshold: number; // % of pool in use (0-100)
scaleDownThreshold: number; // % of pool idle (0-100)
scaleUpIncrement: number; // connections to add
scaleDownIncrement: number; // connections to remove
evaluationInterval: number; // ms between checks
}
/**
* Health Status Types
*/

View file

@ -8,6 +8,9 @@ import type {
PostgreSQLConnectionOptions,
QueryResult,
TransactionCallback,
PoolMetrics,
ConnectionEvents,
DynamicPoolConfig,
} from './types';
/**
@ -24,8 +27,12 @@ export class PostgreSQLClient {
private readonly healthMonitor: PostgreSQLHealthMonitor;
private readonly transactionManager: PostgreSQLTransactionManager;
private isConnected = false;
private readonly metrics: PoolMetrics;
private readonly events?: ConnectionEvents;
private dynamicPoolConfig?: DynamicPoolConfig;
private poolMonitorInterval?: NodeJS.Timeout;
constructor(config: PostgreSQLClientConfig, options?: PostgreSQLConnectionOptions) {
constructor(config: PostgreSQLClientConfig, options?: PostgreSQLConnectionOptions, events?: ConnectionEvents) {
this.config = config;
this.options = {
retryAttempts: 3,
@ -33,10 +40,20 @@ export class PostgreSQLClient {
healthCheckInterval: 30000,
...options,
};
this.events = events;
this.logger = getLogger('postgres-client');
this.healthMonitor = new PostgreSQLHealthMonitor(this);
this.transactionManager = new PostgreSQLTransactionManager(this);
this.metrics = {
totalConnections: 0,
activeConnections: 0,
idleConnections: 0,
waitingRequests: 0,
errors: 0,
created: new Date(),
};
}
/**
@ -63,17 +80,51 @@ export class PostgreSQLClient {
client.release();
this.isConnected = true;
this.logger.info('Successfully connected to PostgreSQL');
// Update metrics
const poolConfig = this.config.poolSettings;
this.metrics.totalConnections = poolConfig?.max || 10;
this.metrics.idleConnections = poolConfig?.min || 2;
// Fire connection event
if (this.events?.onConnect) {
await Promise.resolve(this.events.onConnect());
}
// Fire pool created event
if (this.events?.onPoolCreated) {
await Promise.resolve(this.events.onPoolCreated());
}
this.logger.info('Successfully connected to PostgreSQL', {
poolSize: this.metrics.totalConnections,
});
// Start health monitoring
this.healthMonitor.start();
// Setup error handlers
this.setupErrorHandlers();
// Setup pool event listeners for metrics
this.setupPoolMetrics();
// Start dynamic pool monitoring if enabled
if (this.dynamicPoolConfig?.enabled) {
this.startPoolMonitoring();
}
return;
} catch (error) {
lastError = error as Error;
this.metrics.errors++;
this.metrics.lastError = lastError.message;
// Fire error event
if (this.events?.onError) {
await Promise.resolve(this.events.onError(lastError));
}
this.logger.error(`PostgreSQL connection attempt ${attempt} failed:`, error);
if (this.pool) {
@ -101,10 +152,22 @@ export class PostgreSQLClient {
}
try {
// Stop pool monitoring
if (this.poolMonitorInterval) {
clearInterval(this.poolMonitorInterval);
this.poolMonitorInterval = undefined;
}
this.healthMonitor.stop();
await this.pool.end();
this.isConnected = false;
this.pool = null;
// Fire disconnect event
if (this.events?.onDisconnect) {
await Promise.resolve(this.events.onDisconnect());
}
this.logger.info('Disconnected from PostgreSQL');
} catch (error) {
this.logger.error('Error disconnecting from PostgreSQL:', error);
@ -411,4 +474,132 @@ export class PostgreSQLClient {
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
/**
* Get current pool metrics
*/
getPoolMetrics(): PoolMetrics {
// Update last used timestamp
this.metrics.lastUsed = new Date();
// Update metrics from pool if available
if (this.pool) {
this.metrics.totalConnections = this.pool.totalCount;
this.metrics.idleConnections = this.pool.idleCount;
this.metrics.waitingRequests = this.pool.waitingCount;
this.metrics.activeConnections = this.metrics.totalConnections - this.metrics.idleConnections;
}
return { ...this.metrics };
}
/**
* Set dynamic pool configuration
*/
setDynamicPoolConfig(config: DynamicPoolConfig): void {
this.dynamicPoolConfig = config;
if (config.enabled && this.isConnected && !this.poolMonitorInterval) {
this.startPoolMonitoring();
} else if (!config.enabled && this.poolMonitorInterval) {
clearInterval(this.poolMonitorInterval);
this.poolMonitorInterval = undefined;
}
}
/**
* Start monitoring pool and adjust size dynamically
*/
private startPoolMonitoring(): void {
if (!this.dynamicPoolConfig || this.poolMonitorInterval) {
return;
}
this.poolMonitorInterval = setInterval(() => {
this.evaluatePoolSize();
}, this.dynamicPoolConfig.evaluationInterval);
}
/**
 * Setup pool event listeners for metrics
 *
 * NOTE(review): the 'release' event requires a recent pg-pool; the package
 * pins pg ^8.11.3, but verify the event exists on upgrade/downgrade.
 * These counters are advisory only — getPoolMetrics() overwrites them from
 * the pool's own totalCount/idleCount/waitingCount when a pool is present.
 */
private setupPoolMetrics(): void {
  if (!this.pool) {
    return;
  }
  // Track when connections are acquired
  this.pool.on('acquire', () => {
    this.metrics.activeConnections++;
    this.metrics.idleConnections--;
  });
  // Track when connections are released
  this.pool.on('release', () => {
    this.metrics.activeConnections--;
    this.metrics.idleConnections++;
  });
}
/**
* Evaluate and adjust pool size based on usage
*/
private async evaluatePoolSize(): Promise<void> {
if (!this.dynamicPoolConfig || !this.pool) {
return;
}
const metrics = this.getPoolMetrics();
const { minSize, maxSize, scaleUpThreshold, scaleDownThreshold } = this.dynamicPoolConfig;
const currentSize = metrics.totalConnections;
const utilization = currentSize > 0 ? ((metrics.activeConnections / currentSize) * 100) : 0;
this.logger.debug('Pool utilization', {
utilization: `${utilization.toFixed(1)}%`,
active: metrics.activeConnections,
total: currentSize,
waiting: metrics.waitingRequests,
});
// Scale up if utilization is high or there are waiting requests
if ((utilization > scaleUpThreshold || metrics.waitingRequests > 0) && currentSize < maxSize) {
const newSize = Math.min(currentSize + this.dynamicPoolConfig.scaleUpIncrement, maxSize);
this.logger.info('Would scale up connection pool', { from: currentSize, to: newSize, utilization });
// Note: pg module doesn't support dynamic resizing, would need reconnection
}
// Scale down if utilization is low
else if (utilization < scaleDownThreshold && currentSize > minSize) {
const newSize = Math.max(currentSize - this.dynamicPoolConfig.scaleDownIncrement, minSize);
this.logger.info('Would scale down connection pool', { from: currentSize, to: newSize, utilization });
// Note: pg module doesn't support dynamic resizing, would need reconnection
}
}
/**
 * Enable pool warmup on connect
 *
 * Issues `min` parallel SELECT 1 queries so connections are opened before
 * the first real query. Failures are logged but never thrown.
 *
 * @throws Error if the client is not connected.
 */
async warmupPool(): Promise<void> {
  if (!this.pool || !this.isConnected) {
    throw new Error('Client not connected');
  }
  // Target the configured minimum pool size (default 2).
  const minSize = this.config.poolSettings?.min || 2;
  const promises: Promise<void>[] = [];
  // Create minimum connections by running parallel queries
  // NOTE(review): assumes each concurrent query checks out a separate
  // client, forcing the pool to open connections — verify against pg's
  // pooling behavior.
  for (let i = 0; i < minSize; i++) {
    promises.push(
      this.pool.query('SELECT 1')
        .then(() => {
          this.logger.debug(`Warmed up connection ${i + 1}/${minSize}`);
        })
        .catch(error => {
          this.logger.warn(`Failed to warm up connection ${i + 1}`, { error });
        })
    );
  }
  // allSettled: warmup failures are logged above, never fatal.
  await Promise.allSettled(promises);
  this.logger.info('Connection pool warmup complete', { connections: minSize });
}
}

View file

@ -1,14 +1,15 @@
import { PostgreSQLClient } from './client';
import type { PostgreSQLClientConfig, PostgreSQLConnectionOptions } from './types';
import type { PostgreSQLClientConfig, PostgreSQLConnectionOptions, ConnectionEvents } from './types';
/**
* Factory function to create a PostgreSQL client instance
*/
export function createPostgreSQLClient(
config: PostgreSQLClientConfig,
options?: PostgreSQLConnectionOptions
options?: PostgreSQLConnectionOptions,
events?: ConnectionEvents
): PostgreSQLClient {
return new PostgreSQLClient(config, options);
return new PostgreSQLClient(config, options, events);
}
/**
@ -16,9 +17,10 @@ export function createPostgreSQLClient(
*/
export async function createAndConnectPostgreSQLClient(
config: PostgreSQLClientConfig,
options?: PostgreSQLConnectionOptions
options?: PostgreSQLConnectionOptions,
events?: ConnectionEvents
): Promise<PostgreSQLClient> {
const client = createPostgreSQLClient(config, options);
const client = createPostgreSQLClient(config, options, events);
await client.connect();
return client;
}

View file

@ -28,6 +28,9 @@ export type {
Strategy,
RiskLimit,
AuditLog,
PoolMetrics,
ConnectionEvents,
DynamicPoolConfig,
} from './types';
// Factory functions
@ -36,9 +39,4 @@ export {
createAndConnectPostgreSQLClient,
} from './factory';
// Singleton instance
export {
getPostgreSQLClient,
connectPostgreSQL,
disconnectPostgreSQL,
} from './singleton';
// Singleton pattern removed - use factory functions instead

View file

@ -1,50 +0,0 @@
import { PostgreSQLClient } from './client';
import type { PostgreSQLClientConfig } from './types';
/**
* Singleton PostgreSQL client instance
* Provides global access to a single PostgreSQL connection pool
*/
let instance: PostgreSQLClient | null = null;
/**
* Initialize the singleton PostgreSQL client
*/
export async function connectPostgreSQL(config?: PostgreSQLClientConfig): Promise<PostgreSQLClient> {
if (!instance) {
if (!config) {
throw new Error('PostgreSQL client not initialized. Call connectPostgreSQL(config) first.');
}
instance = new PostgreSQLClient(config);
await instance.connect();
}
return instance;
}
/**
* Get the singleton PostgreSQL client instance
* @throws Error if not initialized
*/
export function getPostgreSQLClient(): PostgreSQLClient {
if (!instance) {
throw new Error('PostgreSQL client not initialized. Call connectPostgreSQL(config) first.');
}
return instance;
}
/**
* Check if the PostgreSQL client is initialized
*/
export function isInitialized(): boolean {
return instance !== null && instance.connected;
}
/**
* Disconnect and reset the singleton instance
*/
export async function disconnectPostgreSQL(): Promise<void> {
if (instance) {
await instance.disconnect();
instance = null;
}
}

View file

@ -36,6 +36,36 @@ export interface PostgreSQLConnectionOptions {
healthCheckInterval?: number;
}
/**
 * Point-in-time snapshot of a connection pool's size, usage and errors.
 */
export interface PoolMetrics {
  /** Total connections currently owned by the pool (active + idle). */
  totalConnections: number;
  /** Connections currently checked out and serving requests. */
  activeConnections: number;
  /** Connections sitting unused in the pool. */
  idleConnections: number;
  /** Requests queued waiting for a free connection. */
  waitingRequests: number;
  /** Cumulative error count for this pool. */
  errors: number;
  /** Message of the most recent error, if any occurred. */
  lastError?: string;
  /** Average response time — presumably milliseconds; TODO confirm units. */
  avgResponseTime?: number;
  /** When this pool was created. */
  created: Date;
  /** When the pool last served a request, if ever. */
  lastUsed?: Date;
}
/**
 * Optional lifecycle callbacks a consumer can supply to observe
 * connection state changes. Handlers may be synchronous or async.
 */
export interface ConnectionEvents {
  /** Invoked when a connection is established. */
  onConnect?: () => void | Promise<void>;
  /** Invoked when the connection is closed. */
  onDisconnect?: () => void | Promise<void>;
  /** Invoked on connection-level errors. */
  onError?: (error: Error) => void | Promise<void>;
  /** Invoked once the underlying pool has been created. */
  onPoolCreated?: () => void | Promise<void>;
}
/**
 * Settings governing automatic (currently advisory) pool resizing.
 * Thresholds are percentages of pool utilization in the 0-100 range.
 */
export interface DynamicPoolConfig {
  /** Master switch for dynamic sizing. */
  enabled: boolean;
  /** Lower bound on pool size; never shrink below this. */
  minSize: number;
  /** Upper bound on pool size; never grow above this. */
  maxSize: number;
  scaleUpThreshold: number; // % of pool in use (0-100) above which to grow
  scaleDownThreshold: number; // utilization % (0-100) below which to shrink — compared against active %, not idle %
  scaleUpIncrement: number; // connections to add per scale-up step
  scaleDownIncrement: number; // connections to remove per scale-down step
  evaluationInterval: number; // ms between evaluations
}
/**
* Health Status Types
*/

View file

@ -2,32 +2,34 @@
"name": "@stock-bot/utils",
"version": "1.0.0",
"description": "Common utility functions for stock-bot services",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"main": "./src/index.ts",
"types": "./src/index.ts",
"scripts": {
"build": "tsc",
"clean": "rimraf dist",
"test": "bun test"
},
"dependencies": {
"@stock-bot/types": "*",
"@stock-bot/config": "workspace:*",
"@stock-bot/logger": "workspace:*",
"@stock-bot/cache": "workspace:*",
"@stock-bot/postgres-client": "workspace:*",
"@stock-bot/mongodb-client": "workspace:*",
"@stock-bot/connection-factory": "workspace:*",
"@stock-bot/types": "workspace:*",
"@stock-bot/http": "workspace:*",
"cheerio": "^1.0.0",
"axios": "^1.7.7",
"axios-rate-limit": "^1.4.0",
"axios-retry": "^4.4.1",
"socks-proxy-agent": "^8.0.2",
"p-limit": "^6.1.0",
"zod": "^3.22.4",
"date-fns": "^2.30.0"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"bun-types": "^1.2.15"
},
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}
}
}

View file

@ -11,11 +11,23 @@
import { createCache, type CacheProvider } from '@stock-bot/cache';
import { getLogger, type Logger } from '@stock-bot/logger';
import { getDatabaseConfig } from '@stock-bot/config';
import type { ServiceResolver } from '@stock-bot/connection-factory';
import type { MongoDBClient } from '@stock-bot/mongodb-client';
import type { PostgreSQLClient } from '@stock-bot/postgres-client';
export interface OperationContextOptions {
handlerName: string;
operationName: string;
parentLogger?: Logger;
container?: ServiceResolver;
}
export class OperationContext {
public readonly logger: Logger;
public readonly mongodb: any; // MongoDB client - imported dynamically
public readonly postgres: any; // PostgreSQL client - imported dynamically
private readonly container?: ServiceResolver;
private _mongodb?: MongoDBClient;
private _postgres?: PostgreSQLClient;
private _cache?: CacheProvider;
private static sharedCache: CacheProvider | null = null;
private static parentLoggers = new Map<string, Logger>();
@ -24,21 +36,64 @@ export class OperationContext {
constructor(
public readonly handlerName: string,
public readonly operationName: string,
parentLogger?: Logger
parentLoggerOrOptions?: Logger | OperationContextOptions
) {
// Create child logger from parent or create handler parent
const parent = parentLogger || this.getOrCreateParentLogger();
this.logger = parent.child(operationName, {
handler: handlerName,
operation: operationName
});
// Set up database access
this.mongodb = this.getDatabaseClient('mongodb');
this.postgres = this.getDatabaseClient('postgres');
// Handle both old and new constructor signatures
if (parentLoggerOrOptions && 'container' in parentLoggerOrOptions) {
const options = parentLoggerOrOptions;
this.container = options.container;
const parent = options.parentLogger || this.getOrCreateParentLogger();
this.logger = parent.child(operationName, {
handler: handlerName,
operation: operationName
});
} else {
// Legacy support
const parentLogger = parentLoggerOrOptions as Logger | undefined;
const parent = parentLogger || this.getOrCreateParentLogger();
this.logger = parent.child(operationName, {
handler: handlerName,
operation: operationName
});
}
}
private getDatabaseClient(type: 'mongodb' | 'postgres'): any {
// Lazy load MongoDB client
get mongodb(): MongoDBClient {
if (!this._mongodb) {
if (this.container) {
try {
this._mongodb = this.container.resolve<MongoDBClient>('mongodb');
} catch (error) {
this.logger.warn('Failed to resolve MongoDB from container, falling back to singleton', { error });
this._mongodb = this.getLegacyDatabaseClient('mongodb') as MongoDBClient;
}
} else {
this._mongodb = this.getLegacyDatabaseClient('mongodb') as MongoDBClient;
}
}
return this._mongodb!;
}
// Lazy load PostgreSQL client
get postgres(): PostgreSQLClient {
if (!this._postgres) {
if (this.container) {
try {
this._postgres = this.container.resolve<PostgreSQLClient>('postgres');
} catch (error) {
this.logger.warn('Failed to resolve PostgreSQL from container, falling back to singleton', { error });
this._postgres = this.getLegacyDatabaseClient('postgres') as PostgreSQLClient;
}
} else {
this._postgres = this.getLegacyDatabaseClient('postgres') as PostgreSQLClient;
}
}
return this._postgres!;
}
// Legacy method for backward compatibility
private getLegacyDatabaseClient(type: 'mongodb' | 'postgres'): any {
try {
if (type === 'mongodb') {
// Dynamic import to avoid TypeScript issues during build
@ -71,6 +126,23 @@ export class OperationContext {
* Keys are automatically prefixed as: "operations:handlerName:operationName:key"
*/
get cache(): CacheProvider {
if (!this._cache) {
if (this.container) {
try {
const baseCache = this.container.resolve<CacheProvider>('cache');
this._cache = this.createContextualCache(baseCache);
} catch (error) {
this.logger.warn('Failed to resolve cache from container, using shared cache', { error });
this._cache = this.getOrCreateSharedCache();
}
} else {
this._cache = this.getOrCreateSharedCache();
}
}
return this._cache!;
}
private getOrCreateSharedCache(): CacheProvider {
if (!OperationContext.sharedCache) {
// Get Redis configuration from database config
if (!OperationContext.databaseConfig) {
@ -91,28 +163,28 @@ export class OperationContext {
redisConfig
});
}
return this.createContextualCache();
return this.createContextualCache(OperationContext.sharedCache);
}
private createContextualCache(): CacheProvider {
private createContextualCache(baseCache: CacheProvider): CacheProvider {
const contextPrefix = `${this.handlerName}:${this.operationName}:`;
// Return a proxy that automatically prefixes keys with context
return {
async get<T>(key: string): Promise<T | null> {
return OperationContext.sharedCache!.get(`${contextPrefix}${key}`);
return baseCache.get(`${contextPrefix}${key}`);
},
async set<T>(key: string, value: T, options?: any): Promise<T | null> {
return OperationContext.sharedCache!.set(`${contextPrefix}${key}`, value, options);
return baseCache.set(`${contextPrefix}${key}`, value, options);
},
async del(key: string): Promise<void> {
return OperationContext.sharedCache!.del(`${contextPrefix}${key}`);
return baseCache.del(`${contextPrefix}${key}`);
},
async exists(key: string): Promise<boolean> {
return OperationContext.sharedCache!.exists(`${contextPrefix}${key}`);
return baseCache.exists(`${contextPrefix}${key}`);
},
async clear(): Promise<void> {
@ -122,23 +194,23 @@ export class OperationContext {
async keys(pattern: string): Promise<string[]> {
const fullPattern = `${contextPrefix}${pattern}`;
return OperationContext.sharedCache!.keys(fullPattern);
return baseCache.keys(fullPattern);
},
getStats() {
return OperationContext.sharedCache!.getStats();
return baseCache.getStats();
},
async health(): Promise<boolean> {
return OperationContext.sharedCache!.health();
return baseCache.health();
},
async waitForReady(timeout?: number): Promise<void> {
return OperationContext.sharedCache!.waitForReady(timeout);
return baseCache.waitForReady(timeout);
},
isReady(): boolean {
return OperationContext.sharedCache!.isReady();
return baseCache.isReady();
}
} as CacheProvider;
}
@ -146,8 +218,15 @@ export class OperationContext {
/**
* Factory method to create OperationContext
*/
static create(handlerName: string, operationName: string, parentLogger?: Logger): OperationContext {
return new OperationContext(handlerName, operationName, parentLogger);
static create(handlerName: string, operationName: string, parentLoggerOrOptions?: Logger | OperationContextOptions): OperationContext {
if (parentLoggerOrOptions && 'container' in parentLoggerOrOptions) {
return new OperationContext(handlerName, operationName, {
...parentLoggerOrOptions,
handlerName,
operationName
});
}
return new OperationContext(handlerName, operationName, parentLoggerOrOptions as Logger | undefined);
}
/**
@ -161,12 +240,38 @@ export class OperationContext {
* Create a child context for sub-operations
*/
createChild(subOperationName: string): OperationContext {
if (this.container) {
return new OperationContext(
this.handlerName,
`${this.operationName}:${subOperationName}`,
{
handlerName: this.handlerName,
operationName: `${this.operationName}:${subOperationName}`,
parentLogger: this.logger,
container: this.container
}
);
}
return new OperationContext(
this.handlerName,
`${this.operationName}:${subOperationName}`,
this.logger
);
}
/**
* Dispose of resources if using container-based connections
* This is a no-op for legacy singleton connections
*/
async dispose(): Promise<void> {
// If using container, it will handle cleanup
// For singleton connections, they persist
this.logger.debug('OperationContext disposed', {
handler: this.handlerName,
operation: this.operationName,
hasContainer: !!this.container
});
}
}
export default OperationContext;

View file

@ -10,6 +10,9 @@
{ "path": "../cache" },
{ "path": "../config" },
{ "path": "../logger" },
{ "path": "../http" }
{ "path": "../http" },
{ "path": "../connection-factory" },
{ "path": "../mongodb-client" },
{ "path": "../postgres-client" }
]
}