logger categorizing libs

This commit is contained in:
Boki 2025-06-20 07:53:53 -04:00
parent 7a8e542ada
commit 4726a85cf3
12 changed files with 35 additions and 36 deletions

View file

@ -48,7 +48,7 @@ export class RedisConnectionManager {
const uniqueName = `${name}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
const connection = this.createConnection(uniqueName, redisConfig, db);
this.connections.set(uniqueName, connection);
this.logger.info(`Created unique Redis connection: ${uniqueName}`);
this.logger.debug(`Created unique Redis connection: ${uniqueName}`);
return connection;
}
}
@ -96,11 +96,11 @@ export class RedisConnectionManager {
});
redis.on('close', () => {
this.logger.info(`Redis connection closed: ${name}`);
this.logger.warn(`Redis connection closed: ${name}`);
});
redis.on('reconnecting', () => {
this.logger.info(`Redis reconnecting: ${name}`);
this.logger.warn(`Redis reconnecting: ${name}`);
});
return redis;
@ -113,7 +113,7 @@ export class RedisConnectionManager {
try {
await connection.quit();
} catch (error) {
this.logger.error('Error closing Redis connection:', error);
this.logger.warn('Error closing Redis connection:', error);
}
}
@ -202,7 +202,7 @@ export class RedisConnectionManager {
const allConnections = new Map([...instance.connections, ...this.sharedConnections]);
if (allConnections.size === 0) {
instance.logger.info('No Redis connections to wait for');
instance.logger.debug('No Redis connections to wait for');
return;
}
@ -233,7 +233,7 @@ export class RedisConnectionManager {
const onReady = () => {
clearTimeout(timeoutId);
RedisConnectionManager.readyConnections.add(name);
this.logger.info(`Redis connection ready: ${name}`);
this.logger.debug(`Redis connection ready: ${name}`);
resolve();
};

View file

@ -79,7 +79,7 @@ export class RedisCache implements CacheProvider {
});
this.redis.on('reconnecting', () => {
this.logger.info('Redis cache reconnecting...');
this.logger.warn('Redis cache reconnecting...');
});
}
@ -282,7 +282,7 @@ export class RedisCache implements CacheProvider {
const keys = await this.redis.keys(pattern);
if (keys.length > 0) {
await this.redis.del(...keys);
this.logger.info('Cache cleared', { keysDeleted: keys.length });
this.logger.warn('Cache cleared', { keysDeleted: keys.length });
}
},
undefined,

View file

@ -108,7 +108,7 @@ export class HttpClient {
const timeoutPromise = new Promise<never>((_, reject) => {
timeoutId = setTimeout(() => {
const elapsed = Date.now() - startTime;
this.logger?.debug('Request timeout triggered', {
this.logger?.warn('Request timeout triggered', {
url: config.url,
method: config.method,
timeout,
@ -142,7 +142,7 @@ export class HttpClient {
return response;
} catch (error) {
const elapsed = Date.now() - startTime;
this.logger?.debug('Adapter failed successful', {
this.logger?.debug('Adapter request failed', {
url: config.url,
elapsedMs: elapsed,
});

View file

@ -32,7 +32,6 @@ export class ProxyManager {
switch (proxy.protocol) {
case 'socks4':
case 'socks5':
// console.log(`Using SOCKS proxy: ${proxyUrl}`);
return new SocksProxyAgent(proxyUrl);
case 'http':
return new HttpProxyAgent(proxyUrl);

View file

@ -86,7 +86,7 @@ export class MongoDBClient {
this.defaultDatabase = databaseName;
if (this.client) {
this.db = this.client.db(databaseName);
this.logger.info(`Default database changed to: ${databaseName}`);
this.logger.debug(`Default database changed to: ${databaseName}`);
}
}

View file

@ -77,7 +77,7 @@ export class QuestDBClient {
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`QuestDB connection attempt ${attempt} failed:`, error);
this.logger.warn(`QuestDB connection attempt ${attempt} failed:`, error);
if (this.pgPool) {
await this.pgPool.end();
@ -362,7 +362,7 @@ export class QuestDBClient {
*/
async optimizeTable(tableName: string): Promise<void> {
await this.query(`VACUUM TABLE ${tableName}`);
this.logger.info(`Optimized table: ${tableName}`);
this.logger.debug(`Optimized table: ${tableName}`);
}
/**

View file

@ -188,7 +188,7 @@ export class QuestDBHealthMonitor {
memoryUsage: process.memoryUsage().heapUsed,
};
this.logger.info('Performance metrics reset');
this.logger.debug('Performance metrics reset');
}
/**
@ -229,6 +229,6 @@ export class QuestDBHealthMonitor {
*/
public destroy(): void {
this.stopMonitoring();
this.logger.info('Health monitor destroyed');
this.logger.debug('Health monitor destroyed');
}
}

View file

@ -150,7 +150,7 @@ export class QuestDBInfluxWriter {
});
if (Object.keys(fields).length === 0) {
this.logger.warn('No analytics fields to write', { symbol, timestamp: analytics.timestamp });
this.logger.debug('No analytics fields to write', { symbol, timestamp: analytics.timestamp });
return;
}
@ -345,7 +345,7 @@ export class QuestDBInfluxWriter {
return;
} catch (error) {
attempt++;
this.logger.error(`Write attempt ${attempt} failed`, {
this.logger.warn(`Write attempt ${attempt} failed`, {
error,
linesCount: lines.length,
willRetry: attempt <= options.retryAttempts,
@ -425,6 +425,6 @@ export class QuestDBInfluxWriter {
*/
public destroy(): void {
this.clearBuffer();
this.logger.info('InfluxDB writer destroyed');
this.logger.debug('InfluxDB writer destroyed');
}
}

View file

@ -178,11 +178,11 @@ export class QuestDBSchemaManager {
try {
await this.client.query(sql);
this.logger.info(`Table ${schema.tableName} created`, { sql });
this.logger.debug(`Table ${schema.tableName} created`, { sql });
} catch (error) {
// Check if table already exists
if (error instanceof Error && error.message.includes('already exists')) {
this.logger.info(`Table ${schema.tableName} already exists`);
this.logger.debug(`Table ${schema.tableName} already exists`);
return;
}
throw error;
@ -197,7 +197,7 @@ export class QuestDBSchemaManager {
try {
await this.client.query(sql);
this.logger.info(`Table ${tableName} dropped`);
this.logger.warn(`Table ${tableName} dropped`);
} catch (error) {
this.logger.error(`Failed to drop table ${tableName}`, error);
throw error;
@ -234,7 +234,7 @@ export class QuestDBSchemaManager {
*/
public addSchema(schema: TableSchema): void {
this.schemas.set(schema.tableName, schema);
this.logger.info(`Schema added for table: ${schema.tableName}`);
this.logger.debug(`Schema added for table: ${schema.tableName}`);
}
/**
@ -256,7 +256,7 @@ export class QuestDBSchemaManager {
// QuestDB automatically optimizes, but we can analyze table stats
try {
const stats = await this.getTableStats(tableName);
this.logger.info(`Table ${tableName} stats`, stats);
this.logger.debug(`Table ${tableName} stats`, stats);
} catch (error) {
this.logger.error(`Failed to optimize table ${tableName}`, error);
throw error;
@ -289,7 +289,7 @@ export class QuestDBSchemaManager {
public async truncateTable(tableName: string): Promise<void> {
try {
await this.client.query(`TRUNCATE TABLE ${tableName}`);
this.logger.info(`Table ${tableName} truncated`);
this.logger.warn(`Table ${tableName} truncated`);
} catch (error) {
this.logger.error(`Failed to truncate table ${tableName}`, error);
throw error;
@ -302,7 +302,7 @@ export class QuestDBSchemaManager {
public async createPartitions(tableName: string, _days: number = 30): Promise<void> {
// QuestDB handles partitioning automatically based on the PARTITION BY clause
// This method is for future extensibility
this.logger.info(`Partitioning is automatic for table ${tableName}`);
this.logger.debug(`Partitioning is automatic for table ${tableName}`);
}
/**

View file

@ -166,7 +166,7 @@ export async function processBatchJob(jobData: BatchJobData, queueName: string):
queueManager.getQueue(queueName);
const { payloadKey, batchIndex, totalBatches, itemCount, totalDelayHours } = jobData;
logger.debug('Processing batch job', {
logger.trace('Processing batch job', {
batchIndex,
totalBatches,
itemCount,
@ -187,7 +187,7 @@ export async function processBatchJob(jobData: BatchJobData, queueName: string):
const delayPerBatch = totalDelayMs / totalBatches; // Time allocated for each batch
const delayPerItem = delayPerBatch / items.length; // Distribute items evenly within batch window
logger.debug('Calculating job delays', {
logger.trace('Calculating job delays', {
batchIndex,
delayPerBatch: `${(delayPerBatch / 1000 / 60).toFixed(2)} minutes`,
delayPerItem: `${(delayPerItem / 1000).toFixed(2)} seconds`,

View file

@ -186,7 +186,7 @@ export class QueueManager {
enableMetrics: true,
});
this.caches.set(queueName, cacheProvider);
logger.debug('Cache created for queue', { queueName });
logger.trace('Cache created for queue', { queueName });
}
return this.caches.get(queueName)!;
}
@ -207,7 +207,7 @@ export class QueueManager {
private initializeBatchCacheSync(queueName: string): void {
// Just create the cache - it will connect automatically when first used
this.getCache(queueName);
logger.debug('Batch cache initialized synchronously for queue', { queueName });
logger.trace('Batch cache initialized synchronously for queue', { queueName });
}
/**

View file

@ -59,7 +59,7 @@ export class Queue {
this.startWorkers(config.workers, config.concurrency || 1);
}
logger.debug('Queue created', {
logger.trace('Queue created', {
queueName,
workers: config.workers || 0,
concurrency: config.concurrency || 1
@ -77,7 +77,7 @@ export class Queue {
* Add a single job to the queue
*/
async add(name: string, data: JobData, options: JobOptions = {}): Promise<Job> {
logger.debug('Adding job', { queueName: this.queueName, jobName: name });
logger.trace('Adding job', { queueName: this.queueName, jobName: name });
return await this.bullQueue.add(name, data, options);
}
@ -87,7 +87,7 @@ export class Queue {
async addBulk(
jobs: Array<{ name: string; data: JobData; opts?: JobOptions }>
): Promise<Job[]> {
logger.debug('Adding bulk jobs', {
logger.trace('Adding bulk jobs', {
queueName: this.queueName,
jobCount: jobs.length
});
@ -257,7 +257,7 @@ export class Queue {
// Setup worker event handlers
worker.on('completed', (job) => {
logger.debug('Job completed', {
logger.trace('Job completed', {
queueName: this.queueName,
jobId: job.id,
handler: job.data?.handler,
@ -299,7 +299,7 @@ export class Queue {
private async processJob(job: Job): Promise<unknown> {
const { handler, operation, payload }: JobData = job.data;
logger.debug('Processing job', {
logger.trace('Processing job', {
id: job.id,
handler,
operation,
@ -316,7 +316,7 @@ export class Queue {
const result = await jobHandler(payload);
logger.debug('Job completed successfully', {
logger.trace('Job completed successfully', {
id: job.id,
handler,
operation,