fixing batch before moving to the apps

This commit is contained in:
Boki 2025-06-19 21:33:31 -04:00
parent 8c2f98e010
commit 3534a2c47b
14 changed files with 43 additions and 884 deletions

View file

@@ -14,7 +14,7 @@ export async function processItems<T>(
options: ProcessOptions
): Promise<BatchResult> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue(queueName);
queueManager.getQueue(queueName);
const startTime = Date.now();
if (items.length === 0) {
@@ -61,7 +61,7 @@ async function processDirect<T>(
options: ProcessOptions
): Promise<Omit<BatchResult, 'duration'>> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue(queueName);
queueManager.getQueue(queueName);
const totalDelayMs = options.totalDelayHours * 60 * 60 * 1000; // Convert hours to milliseconds
const delayPerItem = totalDelayMs / items.length;
@@ -106,7 +106,7 @@ async function processBatched<T>(
options: ProcessOptions
): Promise<Omit<BatchResult, 'duration'>> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue(queueName);
queueManager.getQueue(queueName);
const batchSize = options.batchSize || 100;
const batches = createBatches(items, batchSize);
const totalDelayMs = options.totalDelayHours * 60 * 60 * 1000; // Convert hours to milliseconds
@@ -166,7 +166,7 @@ export async function processBatchJob(
queueName: string
): Promise<unknown> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue(queueName);
queueManager.getQueue(queueName);
const { payloadKey, batchIndex, totalBatches, itemCount } = jobData;
logger.debug('Processing batch job', {
@@ -274,7 +274,7 @@ async function loadPayload<T>(
delayPerItem: number;
priority?: number;
retries: number;
provider: string;
handler: string;
operation: string;
};
} | null;

View file

@@ -1,6 +1,6 @@
import { Queue, type Job } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import type { JobData, DLQConfig, RedisConfig } from './types';
import type { DLQConfig, RedisConfig } from './types';
import { getRedisConnection } from './utils';
const logger = getLogger('dlq-handler');
@@ -12,7 +12,7 @@ export class DeadLetterQueueHandler {
constructor(
private mainQueue: Queue,
private connection: RedisConfig,
connection: RedisConfig,
config: DLQConfig = {}
) {
this.config = {

View file

@@ -7,8 +7,7 @@ import type {
QueueOptions,
GlobalStats,
QueueStats,
RateLimitRule,
RedisConfig
RateLimitRule
} from './types';
import { getRedisConnection } from './utils';
@@ -127,7 +126,7 @@ export class QueueManager {
const queueConfig: QueueWorkerConfig = {
workers: mergedOptions.workers,
concurrency: mergedOptions.concurrency,
startWorker: mergedOptions.workers && mergedOptions.workers > 0,
startWorker: !!mergedOptions.workers && mergedOptions.workers > 0,
};
const queue = new Queue(
@@ -387,7 +386,7 @@ export class QueueManager {
try {
await queue.close();
} catch (error) {
logger.warn('Error closing queue', { error: error.message });
logger.warn('Error closing queue', { error: (error as Error).message });
}
});
@@ -396,16 +395,10 @@ export class QueueManager {
// Close all caches
const cacheShutdownPromises = Array.from(this.caches.values()).map(async (cache) => {
try {
// Try different disconnect methods as different cache providers may use different names
if (typeof cache.disconnect === 'function') {
await cache.disconnect();
} else if (typeof cache.close === 'function') {
await cache.close();
} else if (typeof cache.quit === 'function') {
await cache.quit();
}
// Clear cache before shutdown
await cache.clear();
} catch (error) {
logger.warn('Error closing cache', { error: error.message });
logger.warn('Error clearing cache', { error: (error as Error).message });
}
});
@@ -417,7 +410,7 @@ export class QueueManager {
logger.info('QueueManager shutdown complete');
} catch (error) {
logger.error('Error during shutdown', { error: error.message });
logger.error('Error during shutdown', { error: (error as Error).message });
throw error;
} finally {
// Reset shutdown state

View file

@@ -2,7 +2,7 @@ import { Queue, QueueEvents } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import type { Job } from 'bullmq';
const logger = getLogger('queue-metrics');
// const logger = getLogger('queue-metrics');
export interface QueueMetrics {
// Job counts
@@ -55,20 +55,20 @@ export class QueueMetricsCollector {
* Setup event listeners for metrics collection
*/
private setupEventListeners(): void {
this.queueEvents.on('completed', ({ jobId, returnvalue, prev }) => {
this.queueEvents.on('completed', () => {
// Record completion
this.completedTimestamps.push(Date.now());
this.cleanupOldTimestamps();
});
this.queueEvents.on('failed', ({ jobId, failedReason, prev }) => {
this.queueEvents.on('failed', () => {
// Record failure
this.failedTimestamps.push(Date.now());
this.cleanupOldTimestamps();
});
// Track processing times
this.queueEvents.on('active', async ({ jobId, prev }) => {
this.queueEvents.on('active', async ({ jobId }) => {
const job = await this.getJob(jobId);
if (job) {
(job as any)._startTime = Date.now();
@@ -177,11 +177,11 @@ export class QueueMetricsCollector {
const sum = sorted.reduce((acc, val) => acc + val, 0);
return {
avg: Math.round(sum / sorted.length),
min: sorted[0],
max: sorted[sorted.length - 1],
p95: sorted[Math.floor(sorted.length * 0.95)],
p99: sorted[Math.floor(sorted.length * 0.99)],
avg: sorted.length > 0 ? Math.round(sum / sorted.length) : 0,
min: sorted[0] || 0,
max: sorted[sorted.length - 1] || 0,
p95: sorted[Math.floor(sorted.length * 0.95)] || 0,
p99: sorted[Math.floor(sorted.length * 0.99)] || 0,
};
}

View file

@@ -222,7 +222,6 @@ export class Queue {
concurrency,
maxStalledCount: 3,
stalledInterval: 30000,
maxStalledTime: 60000,
}
);

View file

@@ -24,7 +24,7 @@ export class QueueRateLimiter {
const key = this.getRuleKey(rule.level, rule.queueName, rule.handler, rule.operation);
const limiter = new RateLimiterRedis({
storeClient: this.redisClient,
keyPrefix: rule.config.keyPrefix || `rl:${key}`,
keyPrefix: `rl:${key}`,
points: rule.config.points,
duration: rule.config.duration,
blockDuration: rule.config.blockDuration || 0,
@@ -224,7 +224,7 @@ export class QueueRateLimiter {
queueName,
handler,
operation,
appliedRule,
appliedRule: applicableRule,
limit,
};
} catch (error) {
@@ -233,7 +233,7 @@ export class QueueRateLimiter {
queueName,
handler,
operation,
appliedRule,
appliedRule: applicableRule,
};
}
}

View file

@@ -154,3 +154,19 @@ export interface DLQConfig {
alertThreshold?: number;
cleanupAge?: number;
}
export interface DLQJobInfo {
id: string;
name: string;
failedReason: string;
attemptsMade: number;
timestamp: number;
data: any;
}
export interface ScheduleConfig {
pattern: string;
jobName: string;
data?: any;
options?: JobOptions;
}

View file

@@ -4,7 +4,7 @@ import type { RedisConfig } from './types';
* Get Redis connection configuration with retry settings
*/
export function getRedisConnection(config: RedisConfig) {
const isTest = process.env.NODE_ENV === 'test' || process.env.BUNIT === '1';
const isTest = process.env.NODE_ENV === 'test' || process.env['BUNIT'] === '1';
return {
host: config.host,