fixed cache keys

This commit is contained in:
Boki 2025-06-22 20:34:35 -04:00
parent db3aa9c330
commit 19dfda2392
13 changed files with 286 additions and 221 deletions

View file

@@ -1,107 +0,0 @@
import { OperationContext } from '@stock-bot/di';
import type { ServiceContainer } from '@stock-bot/di';
/**
* Example handler showing how to use the new connection pooling pattern
*/
/**
 * Demonstration handler illustrating the connection-pooling pattern:
 * each operation builds an OperationContext over a ServiceContainer and
 * resolves mongodb / postgres / cache services through it.
 */
export class ExampleHandler {
  constructor(private readonly container: ServiceContainer) {}

  /**
   * Single operation: insert into MongoDB, record status in PostgreSQL,
   * and cache the result keyed by the inserted id.
   */
  async performOperation(data: any): Promise<void> {
    // Context carries service resolution + scoped logging for this operation.
    const opContext = new OperationContext('example-handler', 'perform-operation', this.container, {
      data,
    });

    try {
      opContext.logger.info('Starting operation', { data });

      // MongoDB insert — resolved lazily through the context.
      const mongodb = opContext.resolve<any>('mongodb');
      const insertResult = await mongodb.collection('test').insertOne(data);
      opContext.logger.debug('MongoDB insert complete', { insertedId: insertResult.insertedId });

      // Mirror the operation into PostgreSQL (parameterized query).
      const postgres = opContext.resolve<any>('postgres');
      const params = [insertResult.insertedId, 'completed'];
      await postgres.query('INSERT INTO operations (id, status) VALUES ($1, $2)', params);

      // Cache the final status under a key derived from the Mongo id.
      const cache = opContext.resolve<any>('cache');
      const cacheEntry = {
        status: 'completed',
        timestamp: new Date(),
      };
      await cache.set(`operation:${insertResult.insertedId}`, cacheEntry);

      opContext.logger.info('Operation completed successfully');
    } catch (error) {
      // Log with context, then propagate so the caller can react.
      opContext.logger.error('Operation failed', { error });
      throw error;
    }
  }

  /**
   * Batch operation: runs all items in parallel against a scoped container,
   * which is disposed once the batch finishes (success or failure).
   */
  async performBatchOperation(items: any[]): Promise<void> {
    // Isolated scope so batch connections don't leak into the parent container.
    const scopedContainer = this.container.createScope();
    const batchContext = new OperationContext('example-handler', 'batch-operation', scopedContainer, {
      itemCount: items.length,
    });

    try {
      batchContext.logger.info('Starting batch operation', { itemCount: items.length });

      // Resolve shared services once rather than per item.
      const mongodb = batchContext.resolve<any>('mongodb');
      const cache = batchContext.resolve<any>('cache');

      // Fan out: every item gets its own context for error attribution.
      const workers = items.map(async (item, index) => {
        const itemContext = new OperationContext(
          'example-handler',
          `batch-item-${index}`,
          scopedContainer,
          { item }
        );
        try {
          await mongodb.collection('batch').insertOne(item);
          await cache.set(`batch:${item.id}`, item);
        } catch (error) {
          itemContext.logger.error('Batch item failed', { error, itemIndex: index });
          throw error;
        }
      });
      await Promise.all(workers);

      batchContext.logger.info('Batch operation completed');
    } finally {
      // Always release scoped resources, even if a worker rejected.
      await scopedContainer.dispose();
    }
  }
}
/**
* Example of how to use in a job handler
*/
/**
 * Builds a job-queue handler: dispatches 'batch' jobs to the batch path,
 * everything else to the single-operation path.
 */
export async function createExampleJobHandler(container: ServiceContainer) {
  return async (job: any) => {
    const handler = new ExampleHandler(container);
    const isBatch = job.data.type === 'batch';
    if (isBatch) {
      await handler.performBatchOperation(job.data.items);
      return;
    }
    await handler.performOperation(job.data);
  };
}

View file

@@ -43,14 +43,14 @@ export class WebShareHandler extends BaseHandler {
workingCount: proxies.filter(p => p.isWorking !== false).length,
});
// Cache proxy stats for monitoring
await this.cache.set('webshare-proxy-count', proxies.length, 3600);
await this.cache.set(
'webshare-working-count',
// Cache proxy stats for monitoring using handler's cache methods
await this.cacheSet('proxy-count', proxies.length, 3600);
await this.cacheSet(
'working-count',
proxies.filter(p => p.isWorking !== false).length,
3600
);
await this.cache.set('last-webshare-fetch', new Date().toISOString(), 1800);
await this.cacheSet('last-fetch', new Date().toISOString(), 1800);
return {
success: true,