reworked queue lib

This commit is contained in:
Boki 2025-06-19 07:20:14 -04:00
parent 629ba2b8d4
commit c05a7413dc
34 changed files with 3887 additions and 861 deletions

View file

@ -100,7 +100,7 @@ async function processDirect<T>(
name: 'process-item',
data: {
type: 'process-item',
provider: options.provider || 'generic',
handler: options.handler || 'generic',
operation: options.operation || 'process-item',
payload: item, // Just the item directly - no wrapper!
priority: options.priority || undefined,
@ -116,6 +116,7 @@ async function processDirect<T>(
const createdJobs = await addJobsInChunks(queue, jobs);
return {
totalItems: items.length,
jobsCreated: createdJobs.length,
@ -152,7 +153,7 @@ async function processBatched<T>(
name: 'process-batch',
data: {
type: 'process-batch',
provider: options.provider || 'generic',
handler: options.handler || 'generic',
operation: 'process-batch-items',
payload: {
payloadKey,
@ -212,7 +213,7 @@ export async function processBatchJob(
name: 'process-item',
data: {
type: 'process-item',
provider: options.provider || 'generic',
handler: options.handler || 'generic',
operation: options.operation || 'generic',
payload: item, // Just the item directly!
priority: options.priority || undefined,
@ -268,7 +269,7 @@ async function storeItems<T>(
delayPerItem: 1000,
priority: options.priority || undefined,
retries: options.retries || 3,
provider: options.provider || 'generic',
handler: options.handler || 'generic',
operation: options.operation || 'generic',
},
createdAt: new Date().toISOString(),
@ -289,7 +290,7 @@ async function loadPayload<T>(
delayPerItem: number;
priority?: number;
retries: number;
provider: string;
handler: string;
operation: string;
};
} | null> {

View file

@ -0,0 +1,258 @@
import { Queue, type Job } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import type { JobData } from './types';
import { getRedisConnection } from './utils';
const logger = getLogger('dlq-handler');
export interface DLQConfig {
  /** Attempts before a job is parked in the DLQ (used when job opts omit `attempts`). */
  maxRetries?: number;
  /** Delay in ms applied when re-queueing a job out of the DLQ. */
  retryDelay?: number;
  /** DLQ size at which an alert is logged. */
  alertThreshold?: number;
  /** Age after which cleanup() removes DLQ entries. */
  cleanupAge?: number; // hours
}

/**
 * Parks permanently-failed jobs on a companion `<queueName>-dlq` queue so they
 * can be inspected, retried, or aged out without blocking the main queue.
 *
 * IMPORTANT: no Worker is ever attached to the DLQ, so entries added via
 * `dlq.add()` remain in the *waiting* state indefinitely — they are
 * intentionally never processed. All read paths therefore query the waiting
 * list. (The previous implementation read `getCompleted()`, which is always
 * empty on a worker-less queue, so retry/cleanup/inspect silently did
 * nothing.)
 */
export class DeadLetterQueueHandler {
  private dlq: Queue;
  private config: Required<DLQConfig>;
  // In-memory failure tally keyed by `${job.name}:${job.id}`; the entry is
  // deleted once a job is parked in the DLQ. NOTE(review): entries for jobs
  // that eventually succeed are never removed here — acceptable for a
  // process-lifetime cache, but worth confirming against expected job volume.
  private failureCount = new Map<string, number>();

  constructor(
    private mainQueue: Queue,
    private connection: any,
    config: DLQConfig = {}
  ) {
    this.config = {
      maxRetries: config.maxRetries ?? 3,
      retryDelay: config.retryDelay ?? 60000, // 1 minute
      alertThreshold: config.alertThreshold ?? 100,
      cleanupAge: config.cleanupAge ?? 168, // 7 days
    };
    // Create DLQ with same name but -dlq suffix
    const dlqName = `${mainQueue.name}-dlq`;
    this.dlq = new Queue(dlqName, { connection: getRedisConnection(connection) });
  }

  /**
   * Process a failed job - either track the failure or move it to the DLQ.
   *
   * @param job   The bullmq job that just failed.
   * @param error The error raised by the job's handler.
   */
  async handleFailedJob(job: Job, error: Error): Promise<void> {
    const jobKey = `${job.name}:${job.id}`;
    const currentFailures = (this.failureCount.get(jobKey) || 0) + 1;
    this.failureCount.set(jobKey, currentFailures);
    logger.warn('Job failed', {
      jobId: job.id,
      jobName: job.name,
      attempt: job.attemptsMade,
      maxAttempts: job.opts.attempts,
      error: error.message,
      failureCount: currentFailures,
    });
    // Park the job once bullmq's own retries are exhausted; fall back to our
    // configured maxRetries when the job was enqueued without an attempts opt.
    if (job.attemptsMade >= (job.opts.attempts || this.config.maxRetries)) {
      await this.moveToDeadLetterQueue(job, error);
      this.failureCount.delete(jobKey);
    }
  }

  /**
   * Move job to dead letter queue. Snapshot the original job plus the error
   * so the DLQ entry is self-contained; failures here are logged but not
   * rethrown (losing a DLQ write must not crash the worker).
   */
  private async moveToDeadLetterQueue(job: Job, error: Error): Promise<void> {
    try {
      const dlqData = {
        originalJob: {
          id: job.id,
          name: job.name,
          data: job.data,
          opts: job.opts,
          attemptsMade: job.attemptsMade,
          failedReason: job.failedReason,
          processedOn: job.processedOn,
          timestamp: job.timestamp,
        },
        error: {
          message: error.message,
          stack: error.stack,
          name: error.name,
        },
        movedToDLQAt: new Date().toISOString(),
      };
      // Keep entries around regardless of state so they can be inspected.
      await this.dlq.add('failed-job', dlqData, {
        removeOnComplete: false,
        removeOnFail: false,
      });
      logger.error('Job moved to DLQ', {
        jobId: job.id,
        jobName: job.name,
        error: error.message,
      });
      // Check if we need to alert
      await this.checkAlertThreshold();
    } catch (dlqError) {
      logger.error('Failed to move job to DLQ', {
        jobId: job.id,
        error: dlqError,
      });
    }
  }

  /**
   * Retry jobs from DLQ by re-adding them to the main queue with a delay.
   *
   * @param limit Maximum number of DLQ entries to retry in this call.
   * @returns Number of jobs successfully re-queued (and removed from the DLQ).
   */
  async retryDLQJobs(limit = 10): Promise<number> {
    // BUG FIX: DLQ entries sit in the *waiting* state (no worker consumes the
    // DLQ), so the former getCompleted() call always returned [] and nothing
    // was ever retried. getWaiting(start, end) is inclusive on both ends, so
    // (0, limit - 1) yields at most `limit` jobs (the old (0, limit) range
    // would have fetched limit + 1).
    const jobs = await this.dlq.getWaiting(0, limit - 1);
    let retriedCount = 0;
    for (const dlqJob of jobs) {
      try {
        const { originalJob } = dlqJob.data;
        // Re-add to main queue with delay
        await this.mainQueue.add(
          originalJob.name,
          originalJob.data,
          {
            ...originalJob.opts,
            delay: this.config.retryDelay,
            attempts: this.config.maxRetries,
          }
        );
        // Remove from DLQ
        await dlqJob.remove();
        retriedCount++;
        logger.info('Job retried from DLQ', {
          originalJobId: originalJob.id,
          jobName: originalJob.name,
        });
      } catch (error) {
        logger.error('Failed to retry DLQ job', {
          dlqJobId: dlqJob.id,
          error,
        });
      }
    }
    return retriedCount;
  }

  /**
   * Get DLQ statistics: total size, count from the last 24h, a per-job-name
   * breakdown, and the timestamp of the oldest entry (null when empty).
   */
  async getStats(): Promise<{
    total: number;
    recent: number;
    byJobName: Record<string, number>;
    oldestJob: Date | null;
  }> {
    // Waiting is the normal resting state; completed/failed are included
    // defensively in case entries were touched by an external worker.
    const [completed, failed, waiting] = await Promise.all([
      this.dlq.getCompleted(),
      this.dlq.getFailed(),
      this.dlq.getWaiting(),
    ]);
    const allJobs = [...completed, ...failed, ...waiting];
    const byJobName: Record<string, number> = {};
    let oldestTimestamp: number | null = null;
    for (const job of allJobs) {
      const jobName = job.data.originalJob?.name || 'unknown';
      byJobName[jobName] = (byJobName[jobName] || 0) + 1;
      if (!oldestTimestamp || job.timestamp < oldestTimestamp) {
        oldestTimestamp = job.timestamp;
      }
    }
    // Count recent jobs (last 24 hours)
    const oneDayAgo = Date.now() - 24 * 60 * 60 * 1000;
    const recent = allJobs.filter(job => job.timestamp > oneDayAgo).length;
    return {
      total: allJobs.length,
      recent,
      byJobName,
      oldestJob: oldestTimestamp ? new Date(oldestTimestamp) : null,
    };
  }

  /**
   * Clean up DLQ entries older than `config.cleanupAge` hours.
   *
   * @returns Number of entries removed.
   */
  async cleanup(): Promise<number> {
    const ageInMs = this.config.cleanupAge * 60 * 60 * 1000;
    const cutoffTime = Date.now() - ageInMs;
    // BUG FIX: read the waiting list (where DLQ entries actually live) instead
    // of getCompleted(), which is always empty — cleanup never removed anything.
    const jobs = await this.dlq.getWaiting();
    let removedCount = 0;
    for (const job of jobs) {
      if (job.timestamp < cutoffTime) {
        await job.remove();
        removedCount++;
      }
    }
    logger.info('DLQ cleanup completed', {
      removedCount,
      cleanupAge: `${this.config.cleanupAge} hours`,
    });
    return removedCount;
  }

  /**
   * Check if alert threshold is exceeded; logs an error-level alert when the
   * DLQ total reaches `config.alertThreshold`.
   */
  private async checkAlertThreshold(): Promise<void> {
    const stats = await this.getStats();
    if (stats.total >= this.config.alertThreshold) {
      logger.error('DLQ alert threshold exceeded', {
        threshold: this.config.alertThreshold,
        currentCount: stats.total,
        byJobName: stats.byJobName,
      });
      // In a real implementation, this would trigger alerts
    }
  }

  /**
   * Get failed jobs for inspection (flattened snapshots, newest-first order
   * is whatever the queue returns).
   *
   * @param limit Maximum number of entries to return.
   */
  async inspectFailedJobs(limit = 10): Promise<Array<{
    id: string;
    name: string;
    data: any;
    error: any;
    failedAt: string;
    attempts: number;
  }>> {
    // BUG FIX: waiting list, not getCompleted() — see retryDLQJobs().
    const jobs = await this.dlq.getWaiting(0, limit - 1);
    return jobs.map(job => ({
      id: job.data.originalJob.id,
      name: job.data.originalJob.name,
      data: job.data.originalJob.data,
      error: job.data.error,
      failedAt: job.data.movedToDLQAt,
      attempts: job.data.originalJob.attemptsMade,
    }));
  }

  /**
   * Shutdown DLQ handler: closes the DLQ connection and drops the in-memory
   * failure tally. The main queue is owned by the caller and is not closed.
   */
  async shutdown(): Promise<void> {
    await this.dlq.close();
    this.failureCount.clear();
  }
}

View file

@ -0,0 +1,191 @@
import { getLogger } from '@stock-bot/logger';
import type { JobHandler, HandlerConfig, HandlerConfigWithSchedule, ScheduledJob } from './types';
const logger = getLogger('handler-registry');
/**
 * In-memory registry that maps handler names to their operation tables and,
 * optionally, to the scheduled jobs those handlers declare. The queue manager
 * consults this registry when dispatching jobs and when wiring up cron-style
 * repeat jobs.
 */
class HandlerRegistry {
  /** handler name -> { operation name -> JobHandler } */
  private configByHandler = new Map<string, HandlerConfig>();
  /** handler name -> scheduled jobs declared by that handler */
  private schedulesByHandler = new Map<string, ScheduledJob[]>();

  /**
   * Register a handler with its operations (simple config).
   * Re-registering the same name replaces the previous config.
   */
  register(handlerName: string, config: HandlerConfig): void {
    logger.info(`Registering handler: ${handlerName}`, { operations: Object.keys(config) });
    this.configByHandler.set(handlerName, config);
  }

  /**
   * Register a handler with operations and scheduled jobs (full config).
   * An empty scheduledJobs array is treated the same as none.
   */
  registerWithSchedule(config: HandlerConfigWithSchedule): void {
    const { name, operations, scheduledJobs } = config;
    logger.info(`Registering handler with schedule: ${name}`, {
      operations: Object.keys(operations),
      scheduledJobs: scheduledJobs?.length || 0,
    });
    this.configByHandler.set(name, operations);
    if (scheduledJobs?.length) {
      this.schedulesByHandler.set(name, scheduledJobs);
    }
  }

  /**
   * Look up the JobHandler for a handler/operation pair.
   * Returns null (and logs a warning) when either is unknown.
   */
  getHandler(handler: string, operation: string): JobHandler | null {
    const config = this.configByHandler.get(handler);
    if (!config) {
      logger.warn(`Handler not found: ${handler}`);
      return null;
    }
    const found = config[operation];
    if (found) {
      return found;
    }
    logger.warn(`Operation not found: ${handler}:${operation}`, {
      availableOperations: Object.keys(config),
    });
    return null;
  }

  /** Flatten every handler's scheduled jobs into one list of {handler, job} pairs. */
  getAllScheduledJobs(): Array<{ handler: string; job: ScheduledJob }> {
    return Array.from(this.schedulesByHandler.entries()).flatMap(([handler, jobs]) =>
      jobs.map(job => ({ handler, job }))
    );
  }

  /** Scheduled jobs for a specific handler ([] when it declared none). */
  getScheduledJobs(handler: string): ScheduledJob[] {
    return this.schedulesByHandler.get(handler) ?? [];
  }

  /** Whether a handler registered at least one scheduled job. */
  hasScheduledJobs(handler: string): boolean {
    return this.schedulesByHandler.has(handler);
  }

  /** One summary row per registered handler: name, operation names, schedule count. */
  getHandlerConfigs(): Array<{ name: string; operations: string[]; scheduledJobs: number }> {
    const summaries: Array<{ name: string; operations: string[]; scheduledJobs: number }> = [];
    for (const [name, config] of this.configByHandler) {
      summaries.push({
        name,
        operations: Object.keys(config),
        scheduledJobs: this.schedulesByHandler.get(name)?.length || 0,
      });
    }
    return summaries;
  }

  /**
   * All handlers with their full configurations, keyed by name, for queue
   * manager registration. scheduledJobs is undefined for handlers without any.
   */
  getAllHandlers(): Map<string, { operations: HandlerConfig; scheduledJobs?: ScheduledJob[] }> {
    const result = new Map<string, { operations: HandlerConfig; scheduledJobs?: ScheduledJob[] }>();
    this.configByHandler.forEach((operations, name) => {
      result.set(name, { operations, scheduledJobs: this.schedulesByHandler.get(name) });
    });
    return result;
  }

  /** Names of all registered handlers, in registration order. */
  getHandlers(): string[] {
    return [...this.configByHandler.keys()];
  }

  /** Operation names for a specific handler ([] when the handler is unknown). */
  getOperations(handler: string): string[] {
    const config = this.configByHandler.get(handler);
    return config ? Object.keys(config) : [];
  }

  /** Whether a handler is registered. */
  hasHandler(handler: string): boolean {
    return this.configByHandler.has(handler);
  }

  /** Whether a registered handler exposes the given operation. */
  hasOperation(handler: string, operation: string): boolean {
    const config = this.configByHandler.get(handler);
    return config ? operation in config : false;
  }

  /**
   * Remove a handler and its schedules.
   * @returns true when a handler config was actually removed.
   */
  unregister(handler: string): boolean {
    this.schedulesByHandler.delete(handler);
    return this.configByHandler.delete(handler);
  }

  /** Drop every handler and every schedule. */
  clear(): void {
    this.configByHandler.clear();
    this.schedulesByHandler.clear();
  }

  /** Aggregate counts across the whole registry. */
  getStats(): { handlers: number; totalOperations: number; totalScheduledJobs: number } {
    const totalOperations = [...this.configByHandler.values()].reduce(
      (sum, config) => sum + Object.keys(config).length,
      0
    );
    const totalScheduledJobs = [...this.schedulesByHandler.values()].reduce(
      (sum, jobs) => sum + jobs.length,
      0
    );
    return {
      handlers: this.configByHandler.size,
      totalOperations,
      totalScheduledJobs,
    };
  }
}

// Export singleton instance
export const handlerRegistry = new HandlerRegistry();

View file

@ -1,9 +1,12 @@
export * from './batch-processor';
export * from './provider-registry';
export * from './handler-registry';
export * from './queue-manager';
export * from './queue-instance';
export * from './queue-factory';
export * from './types';
export * from './dlq-handler';
export * from './queue-metrics';
export * from './rate-limiter';
// Re-export commonly used functions
export { initializeBatchCache, processBatchJob, processItems } from './batch-processor';
@ -11,7 +14,7 @@ export { initializeBatchCache, processBatchJob, processItems } from './batch-pro
export { QueueManager } from './queue-manager';
export { Queue } from './queue-instance';
export { providerRegistry } from './provider-registry';
export { handlerRegistry } from './handler-registry';
// Re-export queue factory functions
export {
@ -28,9 +31,12 @@ export type {
BatchResult,
JobHandler,
ProcessOptions,
ProviderConfig,
ProviderConfigWithSchedule,
ProviderInitializer,
HandlerConfig,
HandlerConfigWithSchedule,
HandlerInitializer,
QueueConfig,
ScheduledJob,
RateLimitConfig,
RateLimitRule,
DLQConfig,
} from './types';

View file

@ -1,191 +0,0 @@
import { getLogger } from '@stock-bot/logger';
import type { JobHandler, ProviderConfig, ProviderConfigWithSchedule, ScheduledJob } from './types';
const logger = getLogger('provider-registry');
// NOTE(review): legacy provider registry — this commit appears to replace it
// with the structurally identical HandlerRegistry ("provider" renamed to
// "handler"). Code left byte-identical; comments only.
class ProviderRegistry {
// provider name -> { operation name -> JobHandler }
private providers = new Map<string, ProviderConfig>();
// provider name -> scheduled jobs declared by that provider
private providerSchedules = new Map<string, ScheduledJob[]>();
/**
* Register a provider with its operations (simple config).
* Re-registering the same name replaces the previous config.
*/
register(providerName: string, config: ProviderConfig): void {
logger.info(`Registering provider: ${providerName}`, {
operations: Object.keys(config),
});
this.providers.set(providerName, config);
}
/**
* Register a provider with operations and scheduled jobs (full config).
* An empty scheduledJobs array is treated the same as none.
*/
registerWithSchedule(config: ProviderConfigWithSchedule): void {
logger.info(`Registering provider with schedule: ${config.name}`, {
operations: Object.keys(config.operations),
scheduledJobs: config.scheduledJobs?.length || 0,
});
this.providers.set(config.name, config.operations);
if (config.scheduledJobs && config.scheduledJobs.length > 0) {
this.providerSchedules.set(config.name, config.scheduledJobs);
}
}
/**
* Get a handler for a specific provider and operation.
* Returns null (and logs a warning) when either is unknown.
*/
getHandler(provider: string, operation: string): JobHandler | null {
const providerConfig = this.providers.get(provider);
if (!providerConfig) {
logger.warn(`Provider not found: ${provider}`);
return null;
}
const handler = providerConfig[operation];
if (!handler) {
logger.warn(`Operation not found: ${provider}:${operation}`, {
availableOperations: Object.keys(providerConfig),
});
return null;
}
return handler;
}
/**
* Get all scheduled jobs from all providers, flattened into
* {provider, job} pairs.
*/
getAllScheduledJobs(): Array<{ provider: string; job: ScheduledJob }> {
const allJobs: Array<{ provider: string; job: ScheduledJob }> = [];
for (const [providerName, jobs] of this.providerSchedules) {
for (const job of jobs) {
allJobs.push({
provider: providerName,
job,
});
}
}
return allJobs;
}
/**
* Get scheduled jobs for a specific provider ([] when it declared none).
*/
getScheduledJobs(provider: string): ScheduledJob[] {
return this.providerSchedules.get(provider) || [];
}
/**
* Check if a provider has scheduled jobs.
*/
hasScheduledJobs(provider: string): boolean {
return this.providerSchedules.has(provider);
}
/**
* Get all registered providers with their configurations: one summary row
* per provider (name, operation names, schedule count).
*/
getProviderConfigs(): Array<{ name: string; operations: string[]; scheduledJobs: number }> {
return Array.from(this.providers.keys()).map(name => ({
name,
operations: Object.keys(this.providers.get(name) || {}),
scheduledJobs: this.providerSchedules.get(name)?.length || 0,
}));
}
/**
* Get all providers with their full configurations for queue manager
* registration. scheduledJobs is undefined for providers without any.
*/
getAllProviders(): Map<string, { operations: ProviderConfig; scheduledJobs?: ScheduledJob[] }> {
const result = new Map<
string,
{ operations: ProviderConfig; scheduledJobs?: ScheduledJob[] }
>();
for (const [name, operations] of this.providers) {
const scheduledJobs = this.providerSchedules.get(name);
result.set(name, {
operations,
scheduledJobs,
});
}
return result;
}
/**
* Get all registered provider names, in registration order.
*/
getProviders(): string[] {
return Array.from(this.providers.keys());
}
/**
* Get operations for a specific provider ([] when the provider is unknown).
*/
getOperations(provider: string): string[] {
const providerConfig = this.providers.get(provider);
return providerConfig ? Object.keys(providerConfig) : [];
}
/**
* Check if a provider exists.
*/
hasProvider(provider: string): boolean {
return this.providers.has(provider);
}
/**
* Check if a provider has a specific operation.
*/
hasOperation(provider: string, operation: string): boolean {
const providerConfig = this.providers.get(provider);
return providerConfig ? operation in providerConfig : false;
}
/**
* Remove a provider and its schedules.
* Returns true when a provider config was actually removed.
*/
unregister(provider: string): boolean {
this.providerSchedules.delete(provider);
return this.providers.delete(provider);
}
/**
* Clear all providers and all schedules.
*/
clear(): void {
this.providers.clear();
this.providerSchedules.clear();
}
/**
* Get registry statistics: aggregate counts across the whole registry.
*/
getStats(): { providers: number; totalOperations: number; totalScheduledJobs: number } {
let totalOperations = 0;
let totalScheduledJobs = 0;
for (const config of this.providers.values()) {
totalOperations += Object.keys(config).length;
}
for (const jobs of this.providerSchedules.values()) {
totalScheduledJobs += jobs.length;
}
return {
providers: this.providers.size,
totalOperations,
totalScheduledJobs,
};
}
}
// Export singleton instance
export const providerRegistry = new ProviderRegistry();

View file

@ -5,7 +5,7 @@ import type { ProcessOptions, BatchResult } from './types';
const logger = getLogger('queue-factory');
// Global queue manager (manages workers and providers)
// Global queue manager (manages workers and handlers)
let queueManager: QueueManager | null = null;
// Registry of individual queues
const queues = new Map<string, Queue>();
@ -31,7 +31,7 @@ export async function initializeQueueSystem(config: {
workers: config.workers || 5,
concurrency: config.concurrency || 20,
defaultJobOptions: config.defaultJobOptions,
providers: [], // Will be set by individual services
handlers: [], // Will be set by individual services
});
await queueManager.initialize();

View file

@ -1,8 +1,9 @@
import { Queue as BullQueue, Worker, QueueEvents, type Job } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import { processItems, processBatchJob } from './batch-processor';
import { providerRegistry } from './provider-registry';
import { handlerRegistry } from './handler-registry';
import type { JobData, ProcessOptions, BatchResult, BatchJobData } from './types';
import { getRedisConnection } from './utils';
const logger = getLogger('queue-instance');
@ -14,16 +15,11 @@ export class Queue {
private redisConfig: any;
private initialized = false;
constructor(queueName: string, redisConfig: any) {
constructor(queueName: string, redisConfig: any, options: { startWorker?: boolean } = {}) {
this.queueName = queueName;
this.redisConfig = redisConfig;
const connection = {
host: redisConfig.host,
port: redisConfig.port,
password: redisConfig.password,
db: redisConfig.db,
};
const connection = getRedisConnection(redisConfig);
// Initialize BullMQ queue
this.bullQueue = new BullQueue(`{${queueName}}`, {
@ -42,8 +38,10 @@ export class Queue {
// Initialize queue events
this.queueEvents = new QueueEvents(`{${queueName}}`, { connection });
// Start a worker for this queue
this.startWorker();
// Start a worker for this queue unless explicitly disabled
if (options.startWorker !== false) {
this.startWorker();
}
}
/**
@ -100,7 +98,9 @@ export class Queue {
async addBulk(
jobs: Array<{ name: string; data: JobData; opts?: Record<string, unknown> }>
): Promise<Job[]> {
return await this.bullQueue.addBulk(jobs);
const createdJobs = await this.bullQueue.addBulk(jobs);
return createdJobs;
}
/**
@ -185,12 +185,7 @@ export class Queue {
* Start a worker for this queue
*/
private startWorker(): void {
const connection = {
host: this.redisConfig.host,
port: this.redisConfig.port,
password: this.redisConfig.password,
db: this.redisConfig.db,
};
const connection = getRedisConnection(this.redisConfig);
const worker = new Worker(`{${this.queueName}}`, this.processJob.bind(this), {
connection,
@ -222,11 +217,11 @@ export class Queue {
* Process a job
*/
private async processJob(job: Job) {
const { provider, operation, payload }: JobData = job.data;
const { handler, operation, payload }: JobData = job.data;
logger.info('Processing job', {
id: job.id,
provider,
handler,
operation,
queue: this.queueName,
payloadKeys: Object.keys(payload || {}),
@ -240,18 +235,18 @@ export class Queue {
result = await processBatchJob(payload as BatchJobData, this);
} else {
// Regular handler lookup
const handler = providerRegistry.getHandler(provider, operation);
const jobHandler = handlerRegistry.getHandler(handler, operation);
if (!handler) {
throw new Error(`No handler found for ${provider}:${operation}`);
if (!jobHandler) {
throw new Error(`No handler found for ${handler}:${operation}`);
}
result = await handler(payload);
result = await jobHandler(payload);
}
logger.info('Job completed successfully', {
id: job.id,
provider,
handler,
operation,
queue: this.queueName,
});
@ -260,7 +255,7 @@ export class Queue {
} catch (error) {
logger.error('Job processing failed', {
id: job.id,
provider,
handler,
operation,
queue: this.queueName,
error: error instanceof Error ? error.message : String(error),

View file

@ -1,8 +1,11 @@
import { Queue, QueueEvents, Worker, type Job } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import { processBatchJob } from './batch-processor';
import { providerRegistry } from './provider-registry';
import type { JobData, ProviderConfig, ProviderInitializer, QueueConfig } from './types';
import { DeadLetterQueueHandler } from './dlq-handler';
import { handlerRegistry } from './handler-registry';
import { QueueMetricsCollector } from './queue-metrics';
import { QueueRateLimiter, type RateLimitRule } from './rate-limiter';
import type { HandlerConfig, HandlerInitializer, JobData, QueueConfig } from './types';
import { getRedisConnection } from './utils';
const logger = getLogger('queue-manager');
@ -11,8 +14,11 @@ export class QueueManager {
private workers: Worker[] = [];
private queueEvents!: QueueEvents;
private config: Required<QueueConfig>;
private providers: ProviderInitializer[];
private handlers: HandlerInitializer[];
private enableScheduledJobs: boolean;
private dlqHandler?: DeadLetterQueueHandler;
private metricsCollector?: QueueMetricsCollector;
private rateLimiter?: QueueRateLimiter;
private get isInitialized() {
return !!this.queue;
@ -27,18 +33,18 @@ export class QueueManager {
constructor(config: QueueConfig = {}) {
// Enhanced configuration
this.providers = config.providers || [];
this.handlers = config.handlers || [];
this.enableScheduledJobs = config.enableScheduledJobs ?? true;
// Set default configuration
this.config = {
workers: config.workers || parseInt(process.env.WORKER_COUNT || '5'),
concurrency: config.concurrency || parseInt(process.env.WORKER_CONCURRENCY || '20'),
workers: config.workers ?? 5,
concurrency: config.concurrency ?? 20,
redis: {
host: config.redis?.host || process.env.DRAGONFLY_HOST || 'localhost',
port: config.redis?.port || parseInt(process.env.DRAGONFLY_PORT || '6379'),
password: config.redis?.password || process.env.DRAGONFLY_PASSWORD,
db: config.redis?.db || parseInt(process.env.DRAGONFLY_DB || '0'),
host: config.redis?.host || 'localhost',
port: config.redis?.port || 6379,
password: config.redis?.password || '',
db: config.redis?.db || 0,
},
queueName: config.queueName || 'default-queue',
defaultJobOptions: {
@ -51,13 +57,19 @@ export class QueueManager {
},
...config.defaultJobOptions,
},
providers: this.providers,
handlers: this.handlers,
enableScheduledJobs: this.enableScheduledJobs,
enableRateLimit: config.enableRateLimit || false,
globalRateLimit: config.globalRateLimit,
enableDLQ: config.enableDLQ || false,
dlqConfig: config.dlqConfig,
enableMetrics: config.enableMetrics || false,
rateLimitRules: config.rateLimitRules || [],
};
}
/**
* Initialize the queue manager with enhanced provider and scheduled job support
* Initialize the queue manager with enhanced handler and scheduled job support
*/
async initialize(): Promise<void> {
if (this.isInitialized) {
@ -69,13 +81,13 @@ export class QueueManager {
queueName: this.config.queueName,
workers: this.config.workers,
concurrency: this.config.concurrency,
providers: this.providers.length,
handlers: this.handlers.length,
enableScheduledJobs: this.enableScheduledJobs,
});
try {
// Step 1: Register all providers
await this.registerProviders();
// Step 1: Register all handlers
await this.registerHandlers();
// Step 2: Initialize core queue infrastructure
const connection = this.getConnection();
@ -90,15 +102,39 @@ export class QueueManager {
// Initialize queue events
this.queueEvents = new QueueEvents(queueName, { connection });
// Step 3: Start workers
// Wait for queue to be ready
await this.queue.waitUntilReady();
// Step 3: Initialize DLQ handler if enabled
if (this.config.enableDLQ) {
this.dlqHandler = new DeadLetterQueueHandler(this.queue, connection, this.config.dlqConfig);
}
// Step 4: Initialize metrics collector if enabled
if (this.config.enableMetrics) {
this.metricsCollector = new QueueMetricsCollector(this.queue, this.queueEvents);
}
// Step 5: Initialize rate limiter if enabled
if (this.config.enableRateLimit && this.config.rateLimitRules) {
const redis = await this.getRedisClient();
this.rateLimiter = new QueueRateLimiter(redis);
// Add configured rate limit rules
for (const rule of this.config.rateLimitRules) {
this.rateLimiter.addRule(rule);
}
}
// Step 6: Start workers
await this.startWorkers();
// Step 4: Setup event listeners
// Step 7: Setup event listeners
this.setupEventListeners();
// Step 5: Batch cache will be initialized by individual Queue instances
// Step 8: Batch cache will be initialized by individual Queue instances
// Step 6: Set up scheduled jobs
// Step 9: Set up scheduled jobs
if (this.enableScheduledJobs) {
await this.setupScheduledJobs();
}
@ -111,45 +147,45 @@ export class QueueManager {
}
/**
* Register all configured providers
* Register all configured handlers
*/
private async registerProviders(): Promise<void> {
logger.info('Registering queue providers...', { count: this.providers.length });
private async registerHandlers(): Promise<void> {
logger.info('Registering queue handlers...', { count: this.handlers.length });
// Initialize providers using the configured provider initializers
for (const providerInitializer of this.providers) {
// Initialize handlers using the configured handler initializers
for (const handlerInitializer of this.handlers) {
try {
await providerInitializer();
await handlerInitializer();
} catch (error) {
logger.error('Failed to initialize provider', { error });
logger.error('Failed to initialize handler', { error });
throw error;
}
}
// Now register all providers from the registry with the queue manager
const allProviders = providerRegistry.getAllProviders();
for (const [providerName, config] of allProviders) {
this.registerProvider(providerName, config.operations);
logger.info(`Registered provider: ${providerName}`);
// Now register all handlers from the registry with the queue manager
const allHandlers = handlerRegistry.getAllHandlers();
for (const [handlerName, config] of allHandlers) {
this.registerHandler(handlerName, config.operations);
logger.info(`Registered handler: ${handlerName}`);
}
// Log scheduled jobs
const scheduledJobs = providerRegistry.getAllScheduledJobs();
logger.info(`Registered ${scheduledJobs.length} scheduled jobs across all providers`);
for (const { provider, job } of scheduledJobs) {
const scheduledJobs = handlerRegistry.getAllScheduledJobs();
logger.info(`Registered ${scheduledJobs.length} scheduled jobs across all handlers`);
for (const { handler, job } of scheduledJobs) {
logger.info(
`Scheduled job: ${provider}.${job.type} - ${job.description} (${job.cronPattern})`
`Scheduled job: ${handler}.${job.type} - ${job.description} (${job.cronPattern})`
);
}
logger.info('All providers registered successfully');
logger.info('All handlers registered successfully');
}
/**
* Set up scheduled jobs from provider registry
* Set up scheduled jobs from handler registry
*/
private async setupScheduledJobs(): Promise<void> {
const scheduledJobs = providerRegistry.getAllScheduledJobs();
const scheduledJobs = handlerRegistry.getAllScheduledJobs();
if (scheduledJobs.length === 0) {
logger.info('No scheduled jobs found');
@ -158,17 +194,17 @@ export class QueueManager {
logger.info(`Setting up ${scheduledJobs.length} scheduled jobs...`);
for (const { provider, job } of scheduledJobs) {
for (const { handler, job } of scheduledJobs) {
try {
const jobData: JobData = {
type: job.type,
provider,
handler,
operation: job.operation,
payload: job.payload,
priority: job.priority,
};
await this.add(`recurring-${provider}-${job.operation}`, jobData, {
await this.add(`recurring-${handler}-${job.operation}`, jobData, {
repeat: {
pattern: job.cronPattern,
tz: 'UTC',
@ -184,9 +220,9 @@ export class QueueManager {
},
});
logger.info(`Scheduled job registered: ${provider}.${job.type} (${job.cronPattern})`);
logger.info(`Scheduled job registered: ${handler}.${job.type} (${job.cronPattern})`);
} catch (error) {
logger.error(`Failed to register scheduled job: ${provider}.${job.type}`, { error });
logger.error(`Failed to register scheduled job: ${handler}.${job.type}`, { error });
}
}
@ -194,10 +230,10 @@ export class QueueManager {
}
/**
* Register a provider with its operations
* Register a handler with its operations
*/
registerProvider(providerName: string, config: ProviderConfig): void {
providerRegistry.register(providerName, config);
registerHandler(handlerName: string, config: HandlerConfig): void {
handlerRegistry.register(handlerName, config);
}
/**
@ -290,41 +326,142 @@ export class QueueManager {
return this.config.redis;
}
/**
* Get queue metrics
*/
async getMetrics() {
if (!this.metricsCollector) {
throw new Error('Metrics not enabled. Set enableMetrics: true in config');
}
return this.metricsCollector.collect();
}
/**
* Get metrics report
*/
async getMetricsReport(): Promise<string> {
if (!this.metricsCollector) {
throw new Error('Metrics not enabled. Set enableMetrics: true in config');
}
return this.metricsCollector.getReport();
}
/**
* Get DLQ stats
*/
async getDLQStats() {
if (!this.dlqHandler) {
throw new Error('DLQ not enabled. Set enableDLQ: true in config');
}
return this.dlqHandler.getStats();
}
/**
* Retry jobs from DLQ
*/
async retryDLQJobs(limit = 10) {
if (!this.dlqHandler) {
throw new Error('DLQ not enabled. Set enableDLQ: true in config');
}
return this.dlqHandler.retryDLQJobs(limit);
}
/**
* Add rate limit rule
*/
addRateLimitRule(rule: RateLimitRule): void {
if (!this.rateLimiter) {
throw new Error('Rate limiting not enabled. Set enableRateLimit: true in config');
}
this.rateLimiter.addRule(rule);
}
/**
* Get rate limit status
*/
async getRateLimitStatus(handler: string, operation: string) {
if (!this.rateLimiter) {
throw new Error('Rate limiting not enabled. Set enableRateLimit: true in config');
}
return this.rateLimiter.getStatus(handler, operation);
}
/**
 * Shutdown the queue manager.
 *
 * All teardown steps (DLQ handler, workers, queue events, queue) are
 * collected into one task list and raced against a 5s timeout so a
 * wedged Redis connection cannot hang the process. Errors are logged
 * but never rethrown, to avoid hanging tests on teardown.
 *
 * NOTE: the pre-refactor version of this method both awaited each
 * close() inline AND pushed a second close() into shutdownTasks (a
 * double-close), and threw from the catch block despite the comment
 * saying not to; this version keeps only the task-list path.
 */
async shutdown(): Promise<void> {
  logger.info('Shutting down queue manager...');
  const shutdownTasks: Promise<void>[] = [];
  try {
    // Shutdown DLQ handler
    if (this.dlqHandler) {
      shutdownTasks.push(
        this.dlqHandler.shutdown().catch(err =>
          logger.warn('Error shutting down DLQ handler', { error: err })
        )
      );
    }
    // Close workers; clear the list only after every close settles
    if (this.workers.length > 0) {
      shutdownTasks.push(
        Promise.all(
          this.workers.map(worker =>
            worker.close().catch(err =>
              logger.warn('Error closing worker', { error: err })
            )
          )
        ).then(() => {
          this.workers = [];
        })
      );
    }
    // Close queue events
    if (this.queueEvents) {
      shutdownTasks.push(
        this.queueEvents.close().catch(err =>
          logger.warn('Error closing queue events', { error: err })
        )
      );
    }
    // Close queue
    if (this.queue) {
      shutdownTasks.push(
        this.queue.close().catch(err =>
          logger.warn('Error closing queue', { error: err })
        )
      );
    }
    // Wait for all shutdown tasks with a timeout
    await Promise.race([
      Promise.all(shutdownTasks),
      new Promise((_, reject) =>
        setTimeout(() => reject(new Error('Shutdown timeout')), 5000)
      )
    ]).catch(err => {
      logger.warn('Some shutdown tasks did not complete cleanly', { error: err });
    });
    logger.info('Queue manager shutdown complete');
  } catch (error) {
    logger.error('Error during queue manager shutdown', { error });
    // Don't throw in shutdown to avoid hanging tests
  }
}
/**
 * Redis connection options for BullMQ, built via the shared helper so
 * retry/timeout settings stay consistent across the queue library.
 *
 * (The pre-refactor inline option object was left above the helper
 * call as unreachable code; it is removed here.)
 */
private getConnection() {
  return getRedisConnection(this.config.redis);
}
/**
 * Build a dedicated Redis client for rate limiting.
 */
private async getRedisClient() {
  // Loaded lazily via require so ioredis stays out of the static import graph.
  const Redis = require('ioredis');
  const connection = this.getConnection();
  return new Redis(connection);
}
private async startWorkers(): Promise<void> {
@ -359,30 +496,45 @@ export class QueueManager {
}
/**
 * Worker callback: route a job to its registered handler.
 *
 * When rate limiting is enabled the job must pass `checkLimit` before
 * dispatch; a blocked job throws so BullMQ retries it after backoff.
 * On failure the job is forwarded to the DLQ handler (when enabled)
 * and the error is rethrown so BullMQ still records the failure.
 *
 * NOTE: the pre-rename `provider`/`providerRegistry` lines that were
 * interleaved with this version are removed; routing is by `handler`.
 */
private async processJob(job: Job) {
  const { handler, operation, payload }: JobData = job.data;
  logger.info('Processing job', {
    id: job.id,
    handler,
    operation,
    payloadKeys: Object.keys(payload || {}),
  });
  try {
    // Check rate limits if enabled
    if (this.rateLimiter) {
      const rateLimit = await this.rateLimiter.checkLimit(handler, operation);
      if (!rateLimit.allowed) {
        // Reschedule job with delay
        const delay = rateLimit.retryAfter || 60000;
        logger.warn('Job rate limited, rescheduling', {
          id: job.id,
          handler,
          operation,
          retryAfter: delay,
        });
        // Throwing lets BullMQ apply its retry/backoff policy.
        throw new Error(`Rate limited. Retry after ${delay}ms`);
      }
    }
    // Regular handler lookup
    const jobHandler = handlerRegistry.getHandler(handler, operation);
    if (!jobHandler) {
      throw new Error(`No handler found for ${handler}:${operation}`);
    }
    const result = await jobHandler(payload);
    logger.info('Job completed successfully', {
      id: job.id,
      handler,
      operation,
    });
    return result;
  } catch (error) {
    logger.error('Job processing failed', {
      id: job.id,
      handler,
      operation,
      error: error instanceof Error ? error.message : String(error),
    });
    // Handle DLQ if enabled
    if (this.dlqHandler && error instanceof Error) {
      await this.dlqHandler.handleFailedJob(job, error);
    }
    throw error;
  }
}

View file

@ -0,0 +1,327 @@
import { Queue, QueueEvents } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import type { Job } from 'bullmq';
const logger = getLogger('queue-metrics');
/**
 * Point-in-time snapshot of a queue's state and recent performance,
 * as produced by QueueMetricsCollector.collect().
 */
export interface QueueMetrics {
  // Job counts
  waiting: number;
  active: number;
  completed: number;
  failed: number;
  delayed: number;
  paused?: number; // equals `waiting` when the queue is paused, 0 otherwise
  // Performance metrics (milliseconds, over recent samples)
  processingTime: {
    avg: number;
    min: number;
    max: number;
    p95: number;
    p99: number;
  };
  // Throughput over the last minute
  throughput: {
    completedPerMinute: number;
    failedPerMinute: number;
    totalPerMinute: number;
  };
  // Job age
  oldestWaitingJob: Date | null; // null when nothing is waiting
  // Health
  isHealthy: boolean; // true iff healthIssues is empty
  healthIssues: string[];
}
/**
 * Collects job counts, processing-time percentiles, throughput and
 * health signals for a BullMQ queue by listening on its QueueEvents.
 */
export class QueueMetricsCollector {
  private processingTimes: number[] = [];
  private completedTimestamps: number[] = [];
  private failedTimestamps: number[] = [];
  // jobId -> epoch ms when the job went active.
  // BUG FIX: the previous implementation stashed `_startTime` on the Job
  // object returned by queue.getJob(), but getJob() builds a fresh instance
  // per call, so the timestamp was lost and no processing time was ever
  // recorded. Tracking start times in a local map fixes that.
  private jobStartTimes = new Map<string, number>();
  private readonly maxSamples = 1000;
  private readonly metricsInterval = 60000; // 1 minute

  constructor(
    private queue: Queue,
    private queueEvents: QueueEvents
  ) {
    this.setupEventListeners();
  }

  /**
   * Setup event listeners for metrics collection
   */
  private setupEventListeners(): void {
    // Remember when each job starts so its duration can be computed.
    this.queueEvents.on('active', ({ jobId }) => {
      this.jobStartTimes.set(jobId, Date.now());
    });
    this.queueEvents.on('completed', ({ jobId }) => {
      // Record completion
      this.completedTimestamps.push(Date.now());
      this.cleanupOldTimestamps();
      // Record processing time if we saw this job start
      const startedAt = this.jobStartTimes.get(jobId);
      if (startedAt !== undefined) {
        this.jobStartTimes.delete(jobId);
        this.recordProcessingTime(Date.now() - startedAt);
      }
    });
    this.queueEvents.on('failed', ({ jobId }) => {
      // Record failure
      this.failedTimestamps.push(Date.now());
      this.cleanupOldTimestamps();
      // Drop the start marker so failed jobs cannot leak in the map
      this.jobStartTimes.delete(jobId);
    });
  }

  /**
   * Record a processing-time sample, keeping only the most recent
   * `maxSamples` values.
   */
  private recordProcessingTime(time: number): void {
    this.processingTimes.push(time);
    // Keep only recent samples
    if (this.processingTimes.length > this.maxSamples) {
      this.processingTimes = this.processingTimes.slice(-this.maxSamples);
    }
  }

  /**
   * Drop completion/failure timestamps older than the metrics window.
   */
  private cleanupOldTimestamps(): void {
    const cutoff = Date.now() - this.metricsInterval;
    this.completedTimestamps = this.completedTimestamps.filter(ts => ts > cutoff);
    this.failedTimestamps = this.failedTimestamps.filter(ts => ts > cutoff);
  }

  /**
   * Collect current metrics (counts, latency percentiles, throughput,
   * oldest waiting job, and derived health signals).
   */
  async collect(): Promise<QueueMetrics> {
    // Get job counts
    const [waiting, active, completed, failed, delayed] = await Promise.all([
      this.queue.getWaitingCount(),
      this.queue.getActiveCount(),
      this.queue.getCompletedCount(),
      this.queue.getFailedCount(),
      this.queue.getDelayedCount(),
    ]);
    // BullMQ doesn't have getPausedCount, check if queue is paused
    const paused = await this.queue.isPaused() ? waiting : 0;
    // Calculate processing time metrics
    const processingTime = this.calculateProcessingTimeMetrics();
    // Calculate throughput
    const throughput = this.calculateThroughput();
    // Get oldest waiting job
    const oldestWaitingJob = await this.getOldestWaitingJob();
    // Check health
    const { isHealthy, healthIssues } = this.checkHealth({
      waiting,
      active,
      failed,
      processingTime,
    });
    return {
      waiting,
      active,
      completed,
      failed,
      delayed,
      paused,
      processingTime,
      throughput,
      oldestWaitingJob,
      isHealthy,
      healthIssues,
    };
  }

  /**
   * Compute avg/min/max/p95/p99 over the recorded samples (all zeros
   * when no samples exist yet).
   */
  private calculateProcessingTimeMetrics(): QueueMetrics['processingTime'] {
    if (this.processingTimes.length === 0) {
      return { avg: 0, min: 0, max: 0, p95: 0, p99: 0 };
    }
    const sorted = [...this.processingTimes].sort((a, b) => a - b);
    const sum = sorted.reduce((acc, val) => acc + val, 0);
    return {
      avg: Math.round(sum / sorted.length),
      min: sorted[0],
      max: sorted[sorted.length - 1],
      p95: sorted[Math.floor(sorted.length * 0.95)],
      p99: sorted[Math.floor(sorted.length * 0.99)],
    };
  }

  /**
   * Count completions/failures within the last 60 seconds.
   */
  private calculateThroughput(): QueueMetrics['throughput'] {
    const now = Date.now();
    const oneMinuteAgo = now - 60000;
    const completedPerMinute = this.completedTimestamps.filter(ts => ts > oneMinuteAgo).length;
    const failedPerMinute = this.failedTimestamps.filter(ts => ts > oneMinuteAgo).length;
    return {
      completedPerMinute,
      failedPerMinute,
      totalPerMinute: completedPerMinute + failedPerMinute,
    };
  }

  /**
   * Enqueue timestamp of the oldest waiting job, or null when empty.
   */
  private async getOldestWaitingJob(): Promise<Date | null> {
    const waitingJobs = await this.queue.getWaiting(0, 1);
    if (waitingJobs.length > 0) {
      return new Date(waitingJobs[0].timestamp);
    }
    return null;
  }

  /**
   * Derive health issues from the collected metrics.
   */
  private checkHealth(metrics: {
    waiting: number;
    active: number;
    failed: number;
    processingTime: QueueMetrics['processingTime'];
  }): { isHealthy: boolean; healthIssues: string[] } {
    const issues: string[] = [];
    // Check for high failure rate (guard 0/0 -> NaN when nothing finished yet)
    const totalFinished = metrics.failed + this.completedTimestamps.length;
    const failureRate = totalFinished > 0 ? metrics.failed / totalFinished : 0;
    if (failureRate > 0.1) {
      issues.push(`High failure rate: ${(failureRate * 100).toFixed(1)}%`);
    }
    // Check for queue backlog
    if (metrics.waiting > 1000) {
      issues.push(`Large queue backlog: ${metrics.waiting} jobs waiting`);
    }
    // Check for slow processing
    if (metrics.processingTime.avg > 30000) { // 30 seconds
      issues.push(`Slow average processing time: ${(metrics.processingTime.avg / 1000).toFixed(1)}s`);
    }
    // Check for stalled active jobs
    if (metrics.active > 100) {
      issues.push(`High number of active jobs: ${metrics.active}`);
    }
    return {
      isHealthy: issues.length === 0,
      healthIssues: issues,
    };
  }

  /**
   * Get formatted metrics report
   */
  async getReport(): Promise<string> {
    const metrics = await this.collect();
    return `
Queue Metrics Report
===================
Status: ${metrics.isHealthy ? '✅ Healthy' : '⚠️ Issues Detected'}
Job Counts:
- Waiting: ${metrics.waiting}
- Active: ${metrics.active}
- Completed: ${metrics.completed}
- Failed: ${metrics.failed}
- Delayed: ${metrics.delayed}
- Paused: ${metrics.paused}
Performance:
- Avg Processing Time: ${(metrics.processingTime.avg / 1000).toFixed(2)}s
- Min/Max: ${(metrics.processingTime.min / 1000).toFixed(2)}s / ${(metrics.processingTime.max / 1000).toFixed(2)}s
- P95/P99: ${(metrics.processingTime.p95 / 1000).toFixed(2)}s / ${(metrics.processingTime.p99 / 1000).toFixed(2)}s
Throughput:
- Completed/min: ${metrics.throughput.completedPerMinute}
- Failed/min: ${metrics.throughput.failedPerMinute}
- Total/min: ${metrics.throughput.totalPerMinute}
${metrics.oldestWaitingJob ? `Oldest Waiting Job: ${metrics.oldestWaitingJob.toISOString()}` : 'No waiting jobs'}
${metrics.healthIssues.length > 0 ? `\nHealth Issues:\n${metrics.healthIssues.map(issue => `- ${issue}`).join('\n')}` : ''}
`.trim();
  }

  /**
   * Export metrics in Prometheus format
   */
  async getPrometheusMetrics(): Promise<string> {
    const metrics = await this.collect();
    const queueName = this.queue.name;
    return `
# HELP queue_jobs_total Total number of jobs by status
# TYPE queue_jobs_total gauge
queue_jobs_total{queue="${queueName}",status="waiting"} ${metrics.waiting}
queue_jobs_total{queue="${queueName}",status="active"} ${metrics.active}
queue_jobs_total{queue="${queueName}",status="completed"} ${metrics.completed}
queue_jobs_total{queue="${queueName}",status="failed"} ${metrics.failed}
queue_jobs_total{queue="${queueName}",status="delayed"} ${metrics.delayed}
queue_jobs_total{queue="${queueName}",status="paused"} ${metrics.paused}
# HELP queue_processing_time_seconds Job processing time in seconds
# TYPE queue_processing_time_seconds summary
queue_processing_time_seconds{queue="${queueName}",quantile="0.5"} ${(metrics.processingTime.avg / 1000).toFixed(3)}
queue_processing_time_seconds{queue="${queueName}",quantile="0.95"} ${(metrics.processingTime.p95 / 1000).toFixed(3)}
queue_processing_time_seconds{queue="${queueName}",quantile="0.99"} ${(metrics.processingTime.p99 / 1000).toFixed(3)}
queue_processing_time_seconds_sum{queue="${queueName}"} ${(metrics.processingTime.avg * this.processingTimes.length / 1000).toFixed(3)}
queue_processing_time_seconds_count{queue="${queueName}"} ${this.processingTimes.length}
# HELP queue_throughput_per_minute Jobs processed per minute
# TYPE queue_throughput_per_minute gauge
queue_throughput_per_minute{queue="${queueName}",status="completed"} ${metrics.throughput.completedPerMinute}
queue_throughput_per_minute{queue="${queueName}",status="failed"} ${metrics.throughput.failedPerMinute}
queue_throughput_per_minute{queue="${queueName}",status="total"} ${metrics.throughput.totalPerMinute}
# HELP queue_health Queue health status
# TYPE queue_health gauge
queue_health{queue="${queueName}"} ${metrics.isHealthy ? 1 : 0}
`.trim();
  }
}

View file

@ -0,0 +1,295 @@
import { RateLimiterRedis, RateLimiterRes } from 'rate-limiter-flexible';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('rate-limiter');
/**
 * Token-bucket style limit: `points` requests allowed per `duration`
 * seconds, with an optional post-exhaustion block.
 */
export interface RateLimitConfig {
  points: number; // Number of requests
  duration: number; // Per duration in seconds
  blockDuration?: number; // Block duration in seconds
  keyPrefix?: string; // Redis key prefix; defaults to `rl:<rule key>`
}
/**
 * A rate limit scoped to everything ('global'), one handler
 * ('handler'), or one handler+operation pair ('operation').
 */
export interface RateLimitRule {
  level: 'global' | 'handler' | 'operation';
  handler?: string; // required for 'handler' and 'operation' levels
  operation?: string; // required for 'operation' level
  config: RateLimitConfig;
}
/**
 * Redis-backed rate limiting for queue jobs.
 *
 * Rules are registered at three levels (global / handler / operation);
 * a job must pass every applicable limiter before it may run. On any
 * limiter error the check fails open and allows the job.
 */
export class QueueRateLimiter {
  // One RateLimiterRedis per rule, keyed by getRuleKey(level, handler, operation).
  private limiters = new Map<string, RateLimiterRedis>();
  private rules: RateLimitRule[] = [];

  // redisClient: ioredis-compatible client shared by all limiters.
  constructor(private redisClient: any) {}

  /**
   * Add a rate limit rule and create its backing Redis limiter.
   */
  addRule(rule: RateLimitRule): void {
    this.rules.push(rule);
    const key = this.getRuleKey(rule.level, rule.handler, rule.operation);
    const limiter = new RateLimiterRedis({
      storeClient: this.redisClient,
      keyPrefix: rule.config.keyPrefix || `rl:${key}`,
      points: rule.config.points,
      duration: rule.config.duration,
      blockDuration: rule.config.blockDuration || 0,
    });
    this.limiters.set(key, limiter);
    logger.info('Rate limit rule added', {
      level: rule.level,
      handler: rule.handler,
      operation: rule.operation,
      points: rule.config.points,
      duration: rule.config.duration,
    });
  }

  /**
   * Check if a job can be processed based on rate limits.
   * Returns allowed=true with no limits configured for this pair.
   */
  async checkLimit(handler: string, operation: string): Promise<{
    allowed: boolean;
    retryAfter?: number;
    remainingPoints?: number;
  }> {
    const limiters = this.getApplicableLimiters(handler, operation);
    if (limiters.length === 0) {
      return { allowed: true };
    }
    try {
      // Check all applicable rate limiters.
      // NOTE(review): this consumes a point from EVERY applicable limiter
      // even when one of them ends up blocking the request — confirm that
      // is the intended accounting.
      const results = await Promise.all(
        limiters.map(({ limiter, key }) => this.consumePoint(limiter, key))
      );
      // All limiters must allow the request
      const blocked = results.find(r => !r.allowed);
      if (blocked) {
        return blocked;
      }
      // Return the most restrictive remaining points
      const minRemainingPoints = Math.min(...results.map(r => r.remainingPoints || Infinity));
      return {
        allowed: true,
        remainingPoints: minRemainingPoints === Infinity ? undefined : minRemainingPoints,
      };
    } catch (error) {
      logger.error('Rate limit check failed', { handler, operation, error });
      // On error, allow the request to proceed (fail open)
      return { allowed: true };
    }
  }

  /**
   * Consume a point from the rate limiter.
   * rate-limiter-flexible rejects with RateLimiterRes when exhausted;
   * anything else is a real error and is rethrown.
   */
  private async consumePoint(
    limiter: RateLimiterRedis,
    key: string
  ): Promise<{ allowed: boolean; retryAfter?: number; remainingPoints?: number }> {
    try {
      const result = await limiter.consume(key);
      return {
        allowed: true,
        remainingPoints: result.remainingPoints,
      };
    } catch (rejRes) {
      if (rejRes instanceof RateLimiterRes) {
        logger.warn('Rate limit exceeded', {
          key,
          retryAfter: rejRes.msBeforeNext,
        });
        return {
          allowed: false,
          retryAfter: rejRes.msBeforeNext,
          remainingPoints: rejRes.remainingPoints,
        };
      }
      throw rejRes;
    }
  }

  /**
   * Get the limiters (with their consumer keys) that apply to a
   * handler/operation pair, across all three rule levels.
   */
  private getApplicableLimiters(handler: string, operation: string): Array<{ limiter: RateLimiterRedis; key: string }> {
    const applicable: Array<{ limiter: RateLimiterRedis; key: string }> = [];
    for (const rule of this.rules) {
      let applies = false;
      let consumerKey = '';
      switch (rule.level) {
        case 'global':
          // Global limit applies to all
          applies = true;
          consumerKey = 'global';
          break;
        case 'handler':
          // Handler limit applies if handler matches
          if (rule.handler === handler) {
            applies = true;
            consumerKey = handler;
          }
          break;
        case 'operation':
          // Operation limit applies if both handler and operation match
          if (rule.handler === handler && rule.operation === operation) {
            applies = true;
            consumerKey = `${handler}:${operation}`;
          }
          break;
      }
      if (applies) {
        const ruleKey = this.getRuleKey(rule.level, rule.handler, rule.operation);
        const limiter = this.limiters.get(ruleKey);
        if (limiter) {
          applicable.push({ limiter, key: consumerKey });
        }
      }
    }
    return applicable;
  }

  /**
   * Build the map key under which a rule's limiter is stored.
   */
  private getRuleKey(level: string, handler?: string, operation?: string): string {
    switch (level) {
      case 'global':
        return 'global';
      case 'handler':
        return `handler:${handler}`;
      case 'operation':
        return `operation:${handler}:${operation}`;
      default:
        return level;
    }
  }

  /**
   * Get current rate limit status (remaining points, reset time) for
   * every rule that applies to a handler/operation pair.
   */
  async getStatus(handler: string, operation: string): Promise<{
    handler: string;
    operation: string;
    limits: Array<{
      level: string;
      points: number;
      duration: number;
      remaining: number;
      resetIn: number;
    }>;
  }> {
    const applicable = this.getApplicableLimiters(handler, operation);
    const limits = await Promise.all(
      applicable.map(async ({ limiter, key }) => {
        // Recover the rule for this limiter by identity, to report its level.
        const rule = this.rules.find(r => {
          const ruleKey = this.getRuleKey(r.level, r.handler, r.operation);
          return this.limiters.get(ruleKey) === limiter;
        });
        try {
          const result = await limiter.get(key);
          if (!result) {
            // No consumption recorded yet: full points available.
            return {
              level: rule?.level || 'unknown',
              points: limiter.points,
              duration: limiter.duration,
              remaining: limiter.points,
              resetIn: 0,
            };
          }
          return {
            level: rule?.level || 'unknown',
            points: limiter.points,
            duration: limiter.duration,
            remaining: result.remainingPoints,
            resetIn: result.msBeforeNext,
          };
        } catch (error) {
          // On lookup failure, report the limit as exhausted.
          return {
            level: rule?.level || 'unknown',
            points: limiter.points,
            duration: limiter.duration,
            remaining: 0,
            resetIn: 0,
          };
        }
      })
    );
    return {
      handler,
      operation,
      limits,
    };
  }

  /**
   * Reset rate limits for a handler (all its rules) or a specific
   * handler/operation pair.
   */
  async reset(handler: string, operation?: string): Promise<void> {
    const applicable = operation
      ? this.getApplicableLimiters(handler, operation)
      : this.rules
          .filter(r => !handler || r.handler === handler)
          .map(r => {
            const key = this.getRuleKey(r.level, r.handler, r.operation);
            const limiter = this.limiters.get(key);
            return limiter ? { limiter, key: handler || 'global' } : null;
          })
          .filter(Boolean) as Array<{ limiter: RateLimiterRedis; key: string }>;
    await Promise.all(
      applicable.map(({ limiter, key }) => limiter.delete(key))
    );
    logger.info('Rate limits reset', { handler, operation });
  }

  /**
   * Get a copy of all configured rate limit rules.
   */
  getRules(): RateLimitRule[] {
    return [...this.rules];
  }

  /**
   * Remove a rate limit rule; returns true when a rule was removed.
   */
  removeRule(level: string, handler?: string, operation?: string): boolean {
    const key = this.getRuleKey(level, handler, operation);
    const ruleIndex = this.rules.findIndex(r =>
      r.level === level &&
      (!handler || r.handler === handler) &&
      (!operation || r.operation === operation)
    );
    if (ruleIndex >= 0) {
      this.rules.splice(ruleIndex, 1);
      this.limiters.delete(key);
      logger.info('Rate limit rule removed', { level, handler, operation });
      return true;
    }
    return false;
  }
}

View file

@ -1,7 +1,7 @@
// Types for queue operations
export interface JobData {
type?: string;
provider: string;
handler: string;
operation: string;
payload: any;
priority?: number;
@ -17,7 +17,7 @@ export interface ProcessOptions {
removeOnComplete?: number;
removeOnFail?: number;
// Job routing information
provider?: string;
handler?: string;
operation?: string;
// Optional queue for overloaded function signatures
queue?: any; // QueueManager reference
@ -50,8 +50,15 @@ export interface QueueConfig {
delay: number;
};
};
providers?: ProviderInitializer[];
handlers?: HandlerInitializer[];
enableScheduledJobs?: boolean;
// Rate limiting
enableRateLimit?: boolean;
globalRateLimit?: RateLimitConfig;
enableDLQ?: boolean;
dlqConfig?: DLQConfig;
enableMetrics?: boolean;
rateLimitRules?: RateLimitRule[];
}
export interface JobHandler {
@ -69,14 +76,17 @@ export interface ScheduledJob {
delay?: number;
}
export interface ProviderConfig {
export interface HandlerConfig {
[operation: string]: JobHandler;
}
export interface ProviderConfigWithSchedule {
export interface HandlerConfigWithSchedule {
name: string;
operations: Record<string, JobHandler>;
scheduledJobs?: ScheduledJob[];
// Rate limiting
rateLimit?: RateLimitConfig;
operationLimits?: Record<string, RateLimitConfig>;
}
export interface BatchJobData {
@ -86,6 +96,28 @@ export interface BatchJobData {
itemCount: number;
}
export interface ProviderInitializer {
export interface HandlerInitializer {
(): void | Promise<void>;
}
// Rate limiting types
/** `points` requests allowed per `duration` seconds. */
export interface RateLimitConfig {
  points: number; // number of requests allowed in the window
  duration: number; // window length, in seconds
  blockDuration?: number; // extra block once exhausted, in seconds
}
/**
 * A rate limit scoped to everything ('global'), one handler
 * ('handler'), or one handler+operation pair ('operation').
 */
export interface RateLimitRule {
  level: 'global' | 'handler' | 'operation';
  handler?: string; // required for 'handler' and 'operation' levels
  operation?: string; // required for 'operation' level
  config: RateLimitConfig;
}
// DLQ types
/** Dead-letter-queue behavior knobs; all optional with handler defaults. */
export interface DLQConfig {
  maxRetries?: number; // retries before a job is considered dead
  retryDelay?: number; // presumably milliseconds — confirm against DLQ handler
  alertThreshold?: number; // DLQ size that triggers an alert — confirm units
  cleanupAge?: number; // age before DLQ entries are purged — confirm units
}

31
libs/queue/src/utils.ts Normal file
View file

@ -0,0 +1,31 @@
/**
 * Get Redis connection configuration with retry settings.
 *
 * Under test (NODE_ENV=test or BUNIT=1) the connect timeout and retry
 * budget are reduced so suites fail fast instead of waiting on Redis.
 */
export function getRedisConnection(config: {
  host: string;
  port: number;
  password?: string;
  db?: number;
}) {
  const testMode = process.env.NODE_ENV === 'test' || process.env.BUNIT === '1';
  const maxRetries = testMode ? 1 : 3;

  // Linear backoff capped at 3s; null tells ioredis to stop retrying.
  const retryStrategy = (times: number) => {
    if (times > maxRetries) {
      return null;
    }
    return testMode ? 100 : Math.min(times * 100, 3000);
  };

  return {
    host: config.host,
    port: config.port,
    password: config.password,
    db: config.db,
    maxRetriesPerRequest: null, // Required by BullMQ
    enableReadyCheck: false,
    connectTimeout: testMode ? 1000 : 3000,
    lazyConnect: true,
    keepAlive: false,
    retryStrategy,
  };
}