logger categorizing libs

This commit is contained in:
Boki 2025-06-20 07:53:53 -04:00
parent 7a8e542ada
commit 4726a85cf3
12 changed files with 35 additions and 36 deletions

View file

@ -48,7 +48,7 @@ export class RedisConnectionManager {
const uniqueName = `${name}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`; const uniqueName = `${name}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const connection = this.createConnection(uniqueName, redisConfig, db); const connection = this.createConnection(uniqueName, redisConfig, db);
this.connections.set(uniqueName, connection); this.connections.set(uniqueName, connection);
this.logger.info(`Created unique Redis connection: ${uniqueName}`); this.logger.debug(`Created unique Redis connection: ${uniqueName}`);
return connection; return connection;
} }
} }
@ -96,11 +96,11 @@ export class RedisConnectionManager {
}); });
redis.on('close', () => { redis.on('close', () => {
this.logger.info(`Redis connection closed: ${name}`); this.logger.warn(`Redis connection closed: ${name}`);
}); });
redis.on('reconnecting', () => { redis.on('reconnecting', () => {
this.logger.info(`Redis reconnecting: ${name}`); this.logger.warn(`Redis reconnecting: ${name}`);
}); });
return redis; return redis;
@ -113,7 +113,7 @@ export class RedisConnectionManager {
try { try {
await connection.quit(); await connection.quit();
} catch (error) { } catch (error) {
this.logger.error('Error closing Redis connection:', error); this.logger.warn('Error closing Redis connection:', error);
} }
} }
@ -202,7 +202,7 @@ export class RedisConnectionManager {
const allConnections = new Map([...instance.connections, ...this.sharedConnections]); const allConnections = new Map([...instance.connections, ...this.sharedConnections]);
if (allConnections.size === 0) { if (allConnections.size === 0) {
instance.logger.info('No Redis connections to wait for'); instance.logger.debug('No Redis connections to wait for');
return; return;
} }
@ -233,7 +233,7 @@ export class RedisConnectionManager {
const onReady = () => { const onReady = () => {
clearTimeout(timeoutId); clearTimeout(timeoutId);
RedisConnectionManager.readyConnections.add(name); RedisConnectionManager.readyConnections.add(name);
this.logger.info(`Redis connection ready: ${name}`); this.logger.debug(`Redis connection ready: ${name}`);
resolve(); resolve();
}; };

View file

@ -79,7 +79,7 @@ export class RedisCache implements CacheProvider {
}); });
this.redis.on('reconnecting', () => { this.redis.on('reconnecting', () => {
this.logger.info('Redis cache reconnecting...'); this.logger.warn('Redis cache reconnecting...');
}); });
} }
@ -282,7 +282,7 @@ export class RedisCache implements CacheProvider {
const keys = await this.redis.keys(pattern); const keys = await this.redis.keys(pattern);
if (keys.length > 0) { if (keys.length > 0) {
await this.redis.del(...keys); await this.redis.del(...keys);
this.logger.info('Cache cleared', { keysDeleted: keys.length }); this.logger.warn('Cache cleared', { keysDeleted: keys.length });
} }
}, },
undefined, undefined,

View file

@ -108,7 +108,7 @@ export class HttpClient {
const timeoutPromise = new Promise<never>((_, reject) => { const timeoutPromise = new Promise<never>((_, reject) => {
timeoutId = setTimeout(() => { timeoutId = setTimeout(() => {
const elapsed = Date.now() - startTime; const elapsed = Date.now() - startTime;
this.logger?.debug('Request timeout triggered', { this.logger?.warn('Request timeout triggered', {
url: config.url, url: config.url,
method: config.method, method: config.method,
timeout, timeout,
@ -142,7 +142,7 @@ export class HttpClient {
return response; return response;
} catch (error) { } catch (error) {
const elapsed = Date.now() - startTime; const elapsed = Date.now() - startTime;
this.logger?.debug('Adapter failed successful', { this.logger?.debug('Adapter request failed', {
url: config.url, url: config.url,
elapsedMs: elapsed, elapsedMs: elapsed,
}); });

View file

@ -32,7 +32,6 @@ export class ProxyManager {
switch (proxy.protocol) { switch (proxy.protocol) {
case 'socks4': case 'socks4':
case 'socks5': case 'socks5':
// console.log(`Using SOCKS proxy: ${proxyUrl}`);
return new SocksProxyAgent(proxyUrl); return new SocksProxyAgent(proxyUrl);
case 'http': case 'http':
return new HttpProxyAgent(proxyUrl); return new HttpProxyAgent(proxyUrl);

View file

@ -86,7 +86,7 @@ export class MongoDBClient {
this.defaultDatabase = databaseName; this.defaultDatabase = databaseName;
if (this.client) { if (this.client) {
this.db = this.client.db(databaseName); this.db = this.client.db(databaseName);
this.logger.info(`Default database changed to: ${databaseName}`); this.logger.debug(`Default database changed to: ${databaseName}`);
} }
} }

View file

@ -77,7 +77,7 @@ export class QuestDBClient {
return; return;
} catch (error) { } catch (error) {
lastError = error as Error; lastError = error as Error;
this.logger.error(`QuestDB connection attempt ${attempt} failed:`, error); this.logger.warn(`QuestDB connection attempt ${attempt} failed:`, error);
if (this.pgPool) { if (this.pgPool) {
await this.pgPool.end(); await this.pgPool.end();
@ -362,7 +362,7 @@ export class QuestDBClient {
*/ */
async optimizeTable(tableName: string): Promise<void> { async optimizeTable(tableName: string): Promise<void> {
await this.query(`VACUUM TABLE ${tableName}`); await this.query(`VACUUM TABLE ${tableName}`);
this.logger.info(`Optimized table: ${tableName}`); this.logger.debug(`Optimized table: ${tableName}`);
} }
/** /**

View file

@ -188,7 +188,7 @@ export class QuestDBHealthMonitor {
memoryUsage: process.memoryUsage().heapUsed, memoryUsage: process.memoryUsage().heapUsed,
}; };
this.logger.info('Performance metrics reset'); this.logger.debug('Performance metrics reset');
} }
/** /**
@ -229,6 +229,6 @@ export class QuestDBHealthMonitor {
*/ */
public destroy(): void { public destroy(): void {
this.stopMonitoring(); this.stopMonitoring();
this.logger.info('Health monitor destroyed'); this.logger.debug('Health monitor destroyed');
} }
} }

View file

@ -150,7 +150,7 @@ export class QuestDBInfluxWriter {
}); });
if (Object.keys(fields).length === 0) { if (Object.keys(fields).length === 0) {
this.logger.warn('No analytics fields to write', { symbol, timestamp: analytics.timestamp }); this.logger.debug('No analytics fields to write', { symbol, timestamp: analytics.timestamp });
return; return;
} }
@ -345,7 +345,7 @@ export class QuestDBInfluxWriter {
return; return;
} catch (error) { } catch (error) {
attempt++; attempt++;
this.logger.error(`Write attempt ${attempt} failed`, { this.logger.warn(`Write attempt ${attempt} failed`, {
error, error,
linesCount: lines.length, linesCount: lines.length,
willRetry: attempt <= options.retryAttempts, willRetry: attempt <= options.retryAttempts,
@ -425,6 +425,6 @@ export class QuestDBInfluxWriter {
*/ */
public destroy(): void { public destroy(): void {
this.clearBuffer(); this.clearBuffer();
this.logger.info('InfluxDB writer destroyed'); this.logger.debug('InfluxDB writer destroyed');
} }
} }

View file

@ -178,11 +178,11 @@ export class QuestDBSchemaManager {
try { try {
await this.client.query(sql); await this.client.query(sql);
this.logger.info(`Table ${schema.tableName} created`, { sql }); this.logger.debug(`Table ${schema.tableName} created`, { sql });
} catch (error) { } catch (error) {
// Check if table already exists // Check if table already exists
if (error instanceof Error && error.message.includes('already exists')) { if (error instanceof Error && error.message.includes('already exists')) {
this.logger.info(`Table ${schema.tableName} already exists`); this.logger.debug(`Table ${schema.tableName} already exists`);
return; return;
} }
throw error; throw error;
@ -197,7 +197,7 @@ export class QuestDBSchemaManager {
try { try {
await this.client.query(sql); await this.client.query(sql);
this.logger.info(`Table ${tableName} dropped`); this.logger.warn(`Table ${tableName} dropped`);
} catch (error) { } catch (error) {
this.logger.error(`Failed to drop table ${tableName}`, error); this.logger.error(`Failed to drop table ${tableName}`, error);
throw error; throw error;
@ -234,7 +234,7 @@ export class QuestDBSchemaManager {
*/ */
public addSchema(schema: TableSchema): void { public addSchema(schema: TableSchema): void {
this.schemas.set(schema.tableName, schema); this.schemas.set(schema.tableName, schema);
this.logger.info(`Schema added for table: ${schema.tableName}`); this.logger.debug(`Schema added for table: ${schema.tableName}`);
} }
/** /**
@ -256,7 +256,7 @@ export class QuestDBSchemaManager {
// QuestDB automatically optimizes, but we can analyze table stats // QuestDB automatically optimizes, but we can analyze table stats
try { try {
const stats = await this.getTableStats(tableName); const stats = await this.getTableStats(tableName);
this.logger.info(`Table ${tableName} stats`, stats); this.logger.debug(`Table ${tableName} stats`, stats);
} catch (error) { } catch (error) {
this.logger.error(`Failed to optimize table ${tableName}`, error); this.logger.error(`Failed to optimize table ${tableName}`, error);
throw error; throw error;
@ -289,7 +289,7 @@ export class QuestDBSchemaManager {
public async truncateTable(tableName: string): Promise<void> { public async truncateTable(tableName: string): Promise<void> {
try { try {
await this.client.query(`TRUNCATE TABLE ${tableName}`); await this.client.query(`TRUNCATE TABLE ${tableName}`);
this.logger.info(`Table ${tableName} truncated`); this.logger.warn(`Table ${tableName} truncated`);
} catch (error) { } catch (error) {
this.logger.error(`Failed to truncate table ${tableName}`, error); this.logger.error(`Failed to truncate table ${tableName}`, error);
throw error; throw error;
@ -302,7 +302,7 @@ export class QuestDBSchemaManager {
public async createPartitions(tableName: string, _days: number = 30): Promise<void> { public async createPartitions(tableName: string, _days: number = 30): Promise<void> {
// QuestDB handles partitioning automatically based on the PARTITION BY clause // QuestDB handles partitioning automatically based on the PARTITION BY clause
// This method is for future extensibility // This method is for future extensibility
this.logger.info(`Partitioning is automatic for table ${tableName}`); this.logger.debug(`Partitioning is automatic for table ${tableName}`);
} }
/** /**

View file

@ -166,7 +166,7 @@ export async function processBatchJob(jobData: BatchJobData, queueName: string):
queueManager.getQueue(queueName); queueManager.getQueue(queueName);
const { payloadKey, batchIndex, totalBatches, itemCount, totalDelayHours } = jobData; const { payloadKey, batchIndex, totalBatches, itemCount, totalDelayHours } = jobData;
logger.debug('Processing batch job', { logger.trace('Processing batch job', {
batchIndex, batchIndex,
totalBatches, totalBatches,
itemCount, itemCount,
@ -187,7 +187,7 @@ export async function processBatchJob(jobData: BatchJobData, queueName: string):
const delayPerBatch = totalDelayMs / totalBatches; // Time allocated for each batch const delayPerBatch = totalDelayMs / totalBatches; // Time allocated for each batch
const delayPerItem = delayPerBatch / items.length; // Distribute items evenly within batch window const delayPerItem = delayPerBatch / items.length; // Distribute items evenly within batch window
logger.debug('Calculating job delays', { logger.trace('Calculating job delays', {
batchIndex, batchIndex,
delayPerBatch: `${(delayPerBatch / 1000 / 60).toFixed(2)} minutes`, delayPerBatch: `${(delayPerBatch / 1000 / 60).toFixed(2)} minutes`,
delayPerItem: `${(delayPerItem / 1000).toFixed(2)} seconds`, delayPerItem: `${(delayPerItem / 1000).toFixed(2)} seconds`,

View file

@ -186,7 +186,7 @@ export class QueueManager {
enableMetrics: true, enableMetrics: true,
}); });
this.caches.set(queueName, cacheProvider); this.caches.set(queueName, cacheProvider);
logger.debug('Cache created for queue', { queueName }); logger.trace('Cache created for queue', { queueName });
} }
return this.caches.get(queueName)!; return this.caches.get(queueName)!;
} }
@ -207,7 +207,7 @@ export class QueueManager {
private initializeBatchCacheSync(queueName: string): void { private initializeBatchCacheSync(queueName: string): void {
// Just create the cache - it will connect automatically when first used // Just create the cache - it will connect automatically when first used
this.getCache(queueName); this.getCache(queueName);
logger.debug('Batch cache initialized synchronously for queue', { queueName }); logger.trace('Batch cache initialized synchronously for queue', { queueName });
} }
/** /**

View file

@ -59,7 +59,7 @@ export class Queue {
this.startWorkers(config.workers, config.concurrency || 1); this.startWorkers(config.workers, config.concurrency || 1);
} }
logger.debug('Queue created', { logger.trace('Queue created', {
queueName, queueName,
workers: config.workers || 0, workers: config.workers || 0,
concurrency: config.concurrency || 1 concurrency: config.concurrency || 1
@ -77,7 +77,7 @@ export class Queue {
* Add a single job to the queue * Add a single job to the queue
*/ */
async add(name: string, data: JobData, options: JobOptions = {}): Promise<Job> { async add(name: string, data: JobData, options: JobOptions = {}): Promise<Job> {
logger.debug('Adding job', { queueName: this.queueName, jobName: name }); logger.trace('Adding job', { queueName: this.queueName, jobName: name });
return await this.bullQueue.add(name, data, options); return await this.bullQueue.add(name, data, options);
} }
@ -87,7 +87,7 @@ export class Queue {
async addBulk( async addBulk(
jobs: Array<{ name: string; data: JobData; opts?: JobOptions }> jobs: Array<{ name: string; data: JobData; opts?: JobOptions }>
): Promise<Job[]> { ): Promise<Job[]> {
logger.debug('Adding bulk jobs', { logger.trace('Adding bulk jobs', {
queueName: this.queueName, queueName: this.queueName,
jobCount: jobs.length jobCount: jobs.length
}); });
@ -257,7 +257,7 @@ export class Queue {
// Setup worker event handlers // Setup worker event handlers
worker.on('completed', (job) => { worker.on('completed', (job) => {
logger.debug('Job completed', { logger.trace('Job completed', {
queueName: this.queueName, queueName: this.queueName,
jobId: job.id, jobId: job.id,
handler: job.data?.handler, handler: job.data?.handler,
@ -299,7 +299,7 @@ export class Queue {
private async processJob(job: Job): Promise<unknown> { private async processJob(job: Job): Promise<unknown> {
const { handler, operation, payload }: JobData = job.data; const { handler, operation, payload }: JobData = job.data;
logger.debug('Processing job', { logger.trace('Processing job', {
id: job.id, id: job.id,
handler, handler,
operation, operation,
@ -316,7 +316,7 @@ export class Queue {
const result = await jobHandler(payload); const result = await jobHandler(payload);
logger.debug('Job completed successfully', { logger.trace('Job completed successfully', {
id: job.id, id: job.id,
handler, handler,
operation, operation,