reworked queue lib

This commit is contained in:
Boki 2025-06-19 07:20:14 -04:00
parent 629ba2b8d4
commit c05a7413dc
34 changed files with 3887 additions and 861 deletions

19
libs/browser/turbo.json Normal file
View file

@@ -0,0 +1,19 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/logger#build", "@stock-bot/http#build"],
"outputs": ["dist/**"],
"inputs": [
"src/**",
"package.json",
"tsconfig.json",
"!**/*.test.ts",
"!**/*.spec.ts",
"!**/test/**",
"!**/tests/**",
"!**/__tests__/**"
]
}
}
}

19
libs/config/turbo.json Normal file
View file

@ -0,0 +1,19 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": [],
"outputs": ["dist/**"],
"inputs": [
"src/**",
"package.json",
"tsconfig.json",
"!**/*.test.ts",
"!**/*.spec.ts",
"!**/test/**",
"!**/tests/**",
"!**/__tests__/**"
]
}
}
}

19
libs/proxy/turbo.json Normal file
View file

@ -0,0 +1,19 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": [],
"outputs": ["dist/**"],
"inputs": [
"src/**",
"package.json",
"tsconfig.json",
"!**/*.test.ts",
"!**/*.spec.ts",
"!**/test/**",
"!**/tests/**",
"!**/__tests__/**"
]
}
}
}

View file

@@ -1,85 +0,0 @@
#!/usr/bin/env bun
/**
* Debug script to test batch cleanup issue
*/
import { initializeBatchCache, processItems, QueueManager } from './src';
/**
 * Exercises batched processing on a throwaway queue and polls queue stats
 * until all jobs drain (or a ~20s timeout), to observe batch-cache cleanup.
 *
 * Fix: the original interval callback checked the timeout branch first and
 * did not return, so on the same tick both the timeout and the completion
 * branches could run — clearing the interval twice and calling
 * `queueManager.shutdown()` twice. A `finished` flag plus early returns now
 * guarantee exactly one shutdown path executes.
 */
async function debugBatchCleanup() {
  console.log('🔍 Debugging batch cleanup...');

  const queueManager = new QueueManager({
    queueName: 'debug-cleanup-queue',
    workers: 1,
    concurrency: 2,
  });

  // Register a simple test provider whose single operation just echoes the item.
  queueManager.registerProvider('test', {
    'process-item': async payload => {
      console.log(`🔄 Processing item: ${JSON.stringify(payload)}`);
      await new Promise(resolve => setTimeout(resolve, 100));
      return { processed: true, item: payload };
    },
  });

  await queueManager.initialize();
  await initializeBatchCache(queueManager);

  // Test data: 7 items so a batch size of 3 produces uneven batches [3, 3, 1].
  const items = Array.from({ length: 7 }, (_, i) => ({
    id: i + 1,
    data: `item-${i + 1}`,
  }));

  console.log(`📦 Processing ${items.length} items in batches of 3...`);

  // Process in batches
  const result = await processItems(items, queueManager, {
    totalDelayHours: 0.0028, // 10 seconds
    useBatching: true,
    batchSize: 3, // This will create 3 batches: [3,3,1]
    priority: 1,
    provider: 'test',
    operation: 'process-item',
    removeOnComplete: 2, // Keep only 2 completed jobs
    removeOnFail: 2,
  });

  console.log('📊 Processing result:', result);

  // Monitor queue and cache cleanup once per second.
  let iterations = 0;
  let finished = false; // ensures we clear the interval / shut down exactly once
  const monitor = setInterval(async () => {
    iterations++;
    const stats = await queueManager.getStats();
    console.log(`📈 [${iterations}] Queue stats:`, {
      waiting: stats.waiting,
      active: stats.active,
      completed: stats.completed,
      failed: stats.failed,
    });

    // A previous tick may already have resolved while this one was awaiting stats.
    if (finished) return;

    if (stats.waiting === 0 && stats.active === 0) {
      finished = true;
      clearInterval(monitor);
      console.log('✅ All jobs completed');
      // Wait a bit more to see final cleanup before shutting down.
      setTimeout(async () => {
        const finalStats = await queueManager.getStats();
        console.log('📊 Final stats:', finalStats);
        await queueManager.shutdown();
      }, 2000);
      return;
    }

    // Check if any jobs are stuck
    if (iterations > 20) {
      finished = true;
      clearInterval(monitor);
      console.log('❌ Timeout reached, stopping monitor');
      await queueManager.shutdown();
    }
  }, 1000);
}

if (require.main === module) {
  debugBatchCleanup().catch(console.error);
}

View file

@ -11,13 +11,16 @@
},
"dependencies": {
"bullmq": "^5.0.0",
"ioredis": "^5.3.0",
"rate-limiter-flexible": "^3.0.0",
"@stock-bot/cache": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*"
},
"devDependencies": {
"typescript": "^5.3.0",
"@types/node": "^20.0.0"
"@types/node": "^20.0.0",
"testcontainers": "^10.0.0"
},
"publishConfig": {
"access": "restricted"

View file

@ -100,7 +100,7 @@ async function processDirect<T>(
name: 'process-item',
data: {
type: 'process-item',
provider: options.provider || 'generic',
handler: options.handler || 'generic',
operation: options.operation || 'process-item',
payload: item, // Just the item directly - no wrapper!
priority: options.priority || undefined,
@ -116,6 +116,7 @@ async function processDirect<T>(
const createdJobs = await addJobsInChunks(queue, jobs);
return {
totalItems: items.length,
jobsCreated: createdJobs.length,
@ -152,7 +153,7 @@ async function processBatched<T>(
name: 'process-batch',
data: {
type: 'process-batch',
provider: options.provider || 'generic',
handler: options.handler || 'generic',
operation: 'process-batch-items',
payload: {
payloadKey,
@ -212,7 +213,7 @@ export async function processBatchJob(
name: 'process-item',
data: {
type: 'process-item',
provider: options.provider || 'generic',
handler: options.handler || 'generic',
operation: options.operation || 'generic',
payload: item, // Just the item directly!
priority: options.priority || undefined,
@ -268,7 +269,7 @@ async function storeItems<T>(
delayPerItem: 1000,
priority: options.priority || undefined,
retries: options.retries || 3,
provider: options.provider || 'generic',
handler: options.handler || 'generic',
operation: options.operation || 'generic',
},
createdAt: new Date().toISOString(),
@ -289,7 +290,7 @@ async function loadPayload<T>(
delayPerItem: number;
priority?: number;
retries: number;
provider: string;
handler: string;
operation: string;
};
} | null> {

View file

@@ -0,0 +1,258 @@
import { Queue, type Job } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import type { JobData } from './types';
import { getRedisConnection } from './utils';
const logger = getLogger('dlq-handler');
/** Tuning knobs for the dead-letter-queue handler; all optional with defaults. */
export interface DLQConfig {
  maxRetries?: number; // attempts before a job is considered dead (default 3)
  retryDelay?: number; // delay in ms applied when re-queueing a job from the DLQ (default 60000 = 1 minute)
  alertThreshold?: number; // DLQ size at/above which an alert is logged (default 100)
  cleanupAge?: number; // hours; DLQ entries older than this are removed by cleanup() (default 168 = 7 days)
}
/**
 * Routes terminally-failed jobs from a main BullMQ queue into a companion
 * dead-letter queue ("<main>-dlq"), and provides retry / inspection /
 * cleanup / alerting utilities over that DLQ.
 *
 * Fix: entries are added to the DLQ via `dlq.add(...)` and no worker is ever
 * attached to the DLQ, so they remain in BullMQ's `waiting` state. The
 * original `retryDLQJobs`, `cleanup`, and `inspectFailedJobs` fetched with
 * `getCompleted()`, which therefore always returned an empty list and made
 * those methods no-ops. They now fetch with `getWaiting()`.
 */
export class DeadLetterQueueHandler {
  private dlq: Queue;
  private config: Required<DLQConfig>;
  // In-memory failure counters keyed by `${job.name}:${job.id}`; entries are
  // removed when a job is moved to the DLQ. NOTE(review): counters for jobs
  // that eventually succeed are never cleared — confirm whether that matters.
  private failureCount = new Map<string, number>();

  constructor(
    private mainQueue: Queue,
    private connection: any,
    config: DLQConfig = {}
  ) {
    this.config = {
      maxRetries: config.maxRetries ?? 3,
      retryDelay: config.retryDelay ?? 60000, // 1 minute
      alertThreshold: config.alertThreshold ?? 100,
      cleanupAge: config.cleanupAge ?? 168, // 7 days
    };

    // Create DLQ with same name but -dlq suffix
    const dlqName = `${mainQueue.name}-dlq`;
    this.dlq = new Queue(dlqName, { connection: getRedisConnection(connection) });
  }

  /**
   * Process a failed job - either retry or move to DLQ.
   * Called per failure; moves the job to the DLQ once it has exhausted its
   * configured attempts (falling back to `maxRetries` when the job has none).
   */
  async handleFailedJob(job: Job, error: Error): Promise<void> {
    const jobKey = `${job.name}:${job.id}`;
    const currentFailures = (this.failureCount.get(jobKey) || 0) + 1;
    this.failureCount.set(jobKey, currentFailures);

    logger.warn('Job failed', {
      jobId: job.id,
      jobName: job.name,
      attempt: job.attemptsMade,
      maxAttempts: job.opts.attempts,
      error: error.message,
      failureCount: currentFailures,
    });

    // Check if job should be moved to DLQ
    if (job.attemptsMade >= (job.opts.attempts || this.config.maxRetries)) {
      await this.moveToDeadLetterQueue(job, error);
      this.failureCount.delete(jobKey);
    }
  }

  /**
   * Move job to dead letter queue, preserving a snapshot of the original job
   * and the terminal error. Failures to enqueue are logged, not rethrown, so
   * the caller's failure handling is never interrupted.
   */
  private async moveToDeadLetterQueue(job: Job, error: Error): Promise<void> {
    try {
      const dlqData = {
        originalJob: {
          id: job.id,
          name: job.name,
          data: job.data,
          opts: job.opts,
          attemptsMade: job.attemptsMade,
          failedReason: job.failedReason,
          processedOn: job.processedOn,
          timestamp: job.timestamp,
        },
        error: {
          message: error.message,
          stack: error.stack,
          name: error.name,
        },
        movedToDLQAt: new Date().toISOString(),
      };

      // Keep DLQ entries indefinitely until explicitly retried or cleaned up.
      await this.dlq.add('failed-job', dlqData, {
        removeOnComplete: false,
        removeOnFail: false,
      });

      logger.error('Job moved to DLQ', {
        jobId: job.id,
        jobName: job.name,
        error: error.message,
      });

      // Check if we need to alert
      await this.checkAlertThreshold();
    } catch (dlqError) {
      logger.error('Failed to move job to DLQ', {
        jobId: job.id,
        error: dlqError,
      });
    }
  }

  /**
   * Retry jobs from DLQ: re-adds up to `limit` entries to the main queue
   * (with the configured retry delay and attempts) and removes them from the
   * DLQ. Returns the number of jobs successfully re-queued.
   */
  async retryDLQJobs(limit = 10): Promise<number> {
    // DLQ entries sit in the 'waiting' state — nothing consumes the DLQ —
    // so fetch waiting jobs, not completed ones.
    const jobs = await this.dlq.getWaiting(0, limit);
    let retriedCount = 0;

    for (const dlqJob of jobs) {
      try {
        const { originalJob } = dlqJob.data;

        // Re-add to main queue with delay
        await this.mainQueue.add(
          originalJob.name,
          originalJob.data,
          {
            ...originalJob.opts,
            delay: this.config.retryDelay,
            attempts: this.config.maxRetries,
          }
        );

        // Remove from DLQ
        await dlqJob.remove();
        retriedCount++;

        logger.info('Job retried from DLQ', {
          originalJobId: originalJob.id,
          jobName: originalJob.name,
        });
      } catch (error) {
        logger.error('Failed to retry DLQ job', {
          dlqJobId: dlqJob.id,
          error,
        });
      }
    }

    return retriedCount;
  }

  /**
   * Get DLQ statistics: total entry count, entries from the last 24 hours,
   * a per-job-name breakdown, and the timestamp of the oldest entry.
   */
  async getStats(): Promise<{
    total: number;
    recent: number;
    byJobName: Record<string, number>;
    oldestJob: Date | null;
  }> {
    const [completed, failed, waiting] = await Promise.all([
      this.dlq.getCompleted(),
      this.dlq.getFailed(),
      this.dlq.getWaiting(),
    ]);

    const allJobs = [...completed, ...failed, ...waiting];
    const byJobName: Record<string, number> = {};
    let oldestTimestamp: number | null = null;

    for (const job of allJobs) {
      const jobName = job.data.originalJob?.name || 'unknown';
      byJobName[jobName] = (byJobName[jobName] || 0) + 1;

      if (!oldestTimestamp || job.timestamp < oldestTimestamp) {
        oldestTimestamp = job.timestamp;
      }
    }

    // Count recent jobs (last 24 hours)
    const oneDayAgo = Date.now() - 24 * 60 * 60 * 1000;
    const recent = allJobs.filter(job => job.timestamp > oneDayAgo).length;

    return {
      total: allJobs.length,
      recent,
      byJobName,
      oldestJob: oldestTimestamp ? new Date(oldestTimestamp) : null,
    };
  }

  /**
   * Clean up old DLQ entries: removes entries older than `cleanupAge` hours
   * and returns the number removed.
   */
  async cleanup(): Promise<number> {
    const ageInMs = this.config.cleanupAge * 60 * 60 * 1000;
    const cutoffTime = Date.now() - ageInMs;

    // DLQ entries live in the 'waiting' state (see retryDLQJobs).
    const jobs = await this.dlq.getWaiting();
    let removedCount = 0;

    for (const job of jobs) {
      if (job.timestamp < cutoffTime) {
        await job.remove();
        removedCount++;
      }
    }

    logger.info('DLQ cleanup completed', {
      removedCount,
      cleanupAge: `${this.config.cleanupAge} hours`,
    });

    return removedCount;
  }

  /**
   * Check if alert threshold is exceeded; logs an error-level alert when the
   * DLQ holds at least `alertThreshold` entries.
   */
  private async checkAlertThreshold(): Promise<void> {
    const stats = await this.getStats();

    if (stats.total >= this.config.alertThreshold) {
      logger.error('DLQ alert threshold exceeded', {
        threshold: this.config.alertThreshold,
        currentCount: stats.total,
        byJobName: stats.byJobName,
      });
      // In a real implementation, this would trigger alerts
    }
  }

  /**
   * Get failed jobs for inspection: a flattened view of up to `limit` DLQ
   * entries (original job data, terminal error, and when it was dead-lettered).
   */
  async inspectFailedJobs(limit = 10): Promise<Array<{
    id: string;
    name: string;
    data: any;
    error: any;
    failedAt: string;
    attempts: number;
  }>> {
    // DLQ entries live in the 'waiting' state (see retryDLQJobs).
    const jobs = await this.dlq.getWaiting(0, limit);

    return jobs.map(job => ({
      id: job.data.originalJob.id,
      name: job.data.originalJob.name,
      data: job.data.originalJob.data,
      error: job.data.error,
      failedAt: job.data.movedToDLQAt,
      attempts: job.data.originalJob.attemptsMade,
    }));
  }

  /**
   * Shutdown DLQ handler: closes the DLQ connection and clears in-memory
   * failure counters. Does not touch the main queue.
   */
  async shutdown(): Promise<void> {
    await this.dlq.close();
    this.failureCount.clear();
  }
}

View file

@@ -0,0 +1,191 @@
import { getLogger } from '@stock-bot/logger';
import type { JobHandler, HandlerConfig, HandlerConfigWithSchedule, ScheduledJob } from './types';
const logger = getLogger('handler-registry');
/**
 * In-process registry mapping handler names to their operation tables and
 * (optionally) their declared scheduled jobs. Exposed as a module singleton.
 */
class HandlerRegistry {
  // handler name -> { operation name -> JobHandler }
  private operationsByHandler = new Map<string, HandlerConfig>();
  // handler name -> scheduled job declarations (only set when non-empty)
  private schedulesByHandler = new Map<string, ScheduledJob[]>();

  /** Store a handler's operation table under the given name (simple config). */
  register(handlerName: string, config: HandlerConfig): void {
    logger.info(`Registering handler: ${handlerName}`, {
      operations: Object.keys(config),
    });
    this.operationsByHandler.set(handlerName, config);
  }

  /** Store a handler's operations plus any scheduled jobs it declares (full config). */
  registerWithSchedule(config: HandlerConfigWithSchedule): void {
    logger.info(`Registering handler with schedule: ${config.name}`, {
      operations: Object.keys(config.operations),
      scheduledJobs: config.scheduledJobs?.length || 0,
    });
    this.operationsByHandler.set(config.name, config.operations);
    if (config.scheduledJobs?.length) {
      this.schedulesByHandler.set(config.name, config.scheduledJobs);
    }
  }

  /** Look up the JobHandler for a handler/operation pair; null (with a warning) if absent. */
  getHandler(handler: string, operation: string): JobHandler | null {
    const table = this.operationsByHandler.get(handler);
    if (!table) {
      logger.warn(`Handler not found: ${handler}`);
      return null;
    }
    const fn = table[operation];
    if (fn) {
      return fn;
    }
    logger.warn(`Operation not found: ${handler}:${operation}`, {
      availableOperations: Object.keys(table),
    });
    return null;
  }

  /** Flatten every handler's scheduled jobs into one list of { handler, job } pairs. */
  getAllScheduledJobs(): Array<{ handler: string; job: ScheduledJob }> {
    return [...this.schedulesByHandler.entries()].flatMap(([handlerName, jobs]) =>
      jobs.map(job => ({ handler: handlerName, job }))
    );
  }

  /** Scheduled jobs declared by one handler ([] when it has none). */
  getScheduledJobs(handler: string): ScheduledJob[] {
    return this.schedulesByHandler.get(handler) || [];
  }

  /** Whether the named handler declared any scheduled jobs. */
  hasScheduledJobs(handler: string): boolean {
    return this.schedulesByHandler.has(handler);
  }

  /** Summary of every registered handler: name, operation names, scheduled-job count. */
  getHandlerConfigs(): Array<{ name: string; operations: string[]; scheduledJobs: number }> {
    const summaries: Array<{ name: string; operations: string[]; scheduledJobs: number }> = [];
    for (const [name, operations] of this.operationsByHandler) {
      summaries.push({
        name,
        operations: Object.keys(operations),
        scheduledJobs: this.schedulesByHandler.get(name)?.length || 0,
      });
    }
    return summaries;
  }

  /** Full configurations (operations + optional schedules) keyed by handler name. */
  getAllHandlers(): Map<string, { operations: HandlerConfig; scheduledJobs?: ScheduledJob[] }> {
    const combined = new Map<
      string,
      { operations: HandlerConfig; scheduledJobs?: ScheduledJob[] }
    >();
    this.operationsByHandler.forEach((operations, name) => {
      combined.set(name, {
        operations,
        scheduledJobs: this.schedulesByHandler.get(name),
      });
    });
    return combined;
  }

  /** Names of all registered handlers. */
  getHandlers(): string[] {
    return [...this.operationsByHandler.keys()];
  }

  /** Operation names registered for one handler ([] when it is unknown). */
  getOperations(handler: string): string[] {
    const table = this.operationsByHandler.get(handler);
    return table ? Object.keys(table) : [];
  }

  /** Whether a handler with this name is registered. */
  hasHandler(handler: string): boolean {
    return this.operationsByHandler.has(handler);
  }

  /** Whether the named handler registered the given operation. */
  hasOperation(handler: string, operation: string): boolean {
    const table = this.operationsByHandler.get(handler);
    return table ? operation in table : false;
  }

  /** Drop a handler and its schedules; true when the handler existed. */
  unregister(handler: string): boolean {
    this.schedulesByHandler.delete(handler);
    return this.operationsByHandler.delete(handler);
  }

  /** Remove every registered handler and schedule. */
  clear(): void {
    this.operationsByHandler.clear();
    this.schedulesByHandler.clear();
  }

  /** Aggregate counts across the registry. */
  getStats(): { handlers: number; totalOperations: number; totalScheduledJobs: number } {
    const totalOperations = [...this.operationsByHandler.values()].reduce(
      (sum, table) => sum + Object.keys(table).length,
      0
    );
    const totalScheduledJobs = [...this.schedulesByHandler.values()].reduce(
      (sum, jobs) => sum + jobs.length,
      0
    );
    return {
      handlers: this.operationsByHandler.size,
      totalOperations,
      totalScheduledJobs,
    };
  }
}

// Export singleton instance
export const handlerRegistry = new HandlerRegistry();

View file

@ -1,9 +1,12 @@
export * from './batch-processor';
export * from './provider-registry';
export * from './handler-registry';
export * from './queue-manager';
export * from './queue-instance';
export * from './queue-factory';
export * from './types';
export * from './dlq-handler';
export * from './queue-metrics';
export * from './rate-limiter';
// Re-export commonly used functions
export { initializeBatchCache, processBatchJob, processItems } from './batch-processor';
@ -11,7 +14,7 @@ export { initializeBatchCache, processBatchJob, processItems } from './batch-pro
export { QueueManager } from './queue-manager';
export { Queue } from './queue-instance';
export { providerRegistry } from './provider-registry';
export { handlerRegistry } from './handler-registry';
// Re-export queue factory functions
export {
@ -28,9 +31,12 @@ export type {
BatchResult,
JobHandler,
ProcessOptions,
ProviderConfig,
ProviderConfigWithSchedule,
ProviderInitializer,
HandlerConfig,
HandlerConfigWithSchedule,
HandlerInitializer,
QueueConfig,
ScheduledJob,
RateLimitConfig,
RateLimitRule,
DLQConfig,
} from './types';

View file

@@ -1,191 +0,0 @@
import { getLogger } from '@stock-bot/logger';
import type { JobHandler, ProviderConfig, ProviderConfigWithSchedule, ScheduledJob } from './types';
const logger = getLogger('provider-registry');
/**
 * In-process registry mapping provider names to their operation tables and
 * (optionally) their declared scheduled jobs. Exposed as a module singleton.
 */
class ProviderRegistry {
  // provider name -> { operation name -> JobHandler }
  private catalog = new Map<string, ProviderConfig>();
  // provider name -> scheduled job declarations (only set when non-empty)
  private cronJobs = new Map<string, ScheduledJob[]>();

  /** Record a provider's operation table under the given name (simple config). */
  register(providerName: string, config: ProviderConfig): void {
    logger.info(`Registering provider: ${providerName}`, {
      operations: Object.keys(config),
    });
    this.catalog.set(providerName, config);
  }

  /** Record a provider's operations plus any scheduled jobs it declares (full config). */
  registerWithSchedule(config: ProviderConfigWithSchedule): void {
    logger.info(`Registering provider with schedule: ${config.name}`, {
      operations: Object.keys(config.operations),
      scheduledJobs: config.scheduledJobs?.length || 0,
    });
    this.catalog.set(config.name, config.operations);
    const jobs = config.scheduledJobs;
    if (jobs && jobs.length > 0) {
      this.cronJobs.set(config.name, jobs);
    }
  }

  /** Look up the JobHandler for a provider/operation pair; null (with a warning) if absent. */
  getHandler(provider: string, operation: string): JobHandler | null {
    const entry = this.catalog.get(provider);
    if (!entry) {
      logger.warn(`Provider not found: ${provider}`);
      return null;
    }
    const found = entry[operation];
    if (!found) {
      logger.warn(`Operation not found: ${provider}:${operation}`, {
        availableOperations: Object.keys(entry),
      });
      return null;
    }
    return found;
  }

  /** Flatten every provider's scheduled jobs into one list of { provider, job } pairs. */
  getAllScheduledJobs(): Array<{ provider: string; job: ScheduledJob }> {
    const collected: Array<{ provider: string; job: ScheduledJob }> = [];
    this.cronJobs.forEach((jobs, providerName) => {
      jobs.forEach(job => collected.push({ provider: providerName, job }));
    });
    return collected;
  }

  /** Scheduled jobs declared by one provider ([] when it has none). */
  getScheduledJobs(provider: string): ScheduledJob[] {
    return this.cronJobs.get(provider) || [];
  }

  /** Whether the named provider declared any scheduled jobs. */
  hasScheduledJobs(provider: string): boolean {
    return this.cronJobs.has(provider);
  }

  /** Summary of every registered provider: name, operation names, scheduled-job count. */
  getProviderConfigs(): Array<{ name: string; operations: string[]; scheduledJobs: number }> {
    return [...this.catalog.keys()].map(name => ({
      name,
      operations: Object.keys(this.catalog.get(name) || {}),
      scheduledJobs: this.cronJobs.get(name)?.length || 0,
    }));
  }

  /** Full configurations (operations + optional schedules) keyed by provider name. */
  getAllProviders(): Map<string, { operations: ProviderConfig; scheduledJobs?: ScheduledJob[] }> {
    const snapshot = new Map<
      string,
      { operations: ProviderConfig; scheduledJobs?: ScheduledJob[] }
    >();
    for (const [name, operations] of this.catalog.entries()) {
      snapshot.set(name, {
        operations,
        scheduledJobs: this.cronJobs.get(name),
      });
    }
    return snapshot;
  }

  /** Names of all registered providers. */
  getProviders(): string[] {
    return [...this.catalog.keys()];
  }

  /** Operation names registered for one provider ([] when it is unknown). */
  getOperations(provider: string): string[] {
    const entry = this.catalog.get(provider);
    return entry ? Object.keys(entry) : [];
  }

  /** Whether a provider with this name is registered. */
  hasProvider(provider: string): boolean {
    return this.catalog.has(provider);
  }

  /** Whether the named provider registered the given operation. */
  hasOperation(provider: string, operation: string): boolean {
    const entry = this.catalog.get(provider);
    return entry ? operation in entry : false;
  }

  /** Drop a provider and its schedules; true when the provider existed. */
  unregister(provider: string): boolean {
    this.cronJobs.delete(provider);
    return this.catalog.delete(provider);
  }

  /** Remove every registered provider and schedule. */
  clear(): void {
    this.catalog.clear();
    this.cronJobs.clear();
  }

  /** Aggregate counts across the registry. */
  getStats(): { providers: number; totalOperations: number; totalScheduledJobs: number } {
    let opCount = 0;
    for (const config of this.catalog.values()) {
      opCount += Object.keys(config).length;
    }
    let cronCount = 0;
    for (const jobs of this.cronJobs.values()) {
      cronCount += jobs.length;
    }
    return {
      providers: this.catalog.size,
      totalOperations: opCount,
      totalScheduledJobs: cronCount,
    };
  }
}

// Export singleton instance
export const providerRegistry = new ProviderRegistry();

View file

@ -5,7 +5,7 @@ import type { ProcessOptions, BatchResult } from './types';
const logger = getLogger('queue-factory');
// Global queue manager (manages workers and providers)
// Global queue manager (manages workers and handlers)
let queueManager: QueueManager | null = null;
// Registry of individual queues
const queues = new Map<string, Queue>();
@ -31,7 +31,7 @@ export async function initializeQueueSystem(config: {
workers: config.workers || 5,
concurrency: config.concurrency || 20,
defaultJobOptions: config.defaultJobOptions,
providers: [], // Will be set by individual services
handlers: [], // Will be set by individual services
});
await queueManager.initialize();

View file

@ -1,8 +1,9 @@
import { Queue as BullQueue, Worker, QueueEvents, type Job } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import { processItems, processBatchJob } from './batch-processor';
import { providerRegistry } from './provider-registry';
import { handlerRegistry } from './handler-registry';
import type { JobData, ProcessOptions, BatchResult, BatchJobData } from './types';
import { getRedisConnection } from './utils';
const logger = getLogger('queue-instance');
@ -14,16 +15,11 @@ export class Queue {
private redisConfig: any;
private initialized = false;
constructor(queueName: string, redisConfig: any) {
constructor(queueName: string, redisConfig: any, options: { startWorker?: boolean } = {}) {
this.queueName = queueName;
this.redisConfig = redisConfig;
const connection = {
host: redisConfig.host,
port: redisConfig.port,
password: redisConfig.password,
db: redisConfig.db,
};
const connection = getRedisConnection(redisConfig);
// Initialize BullMQ queue
this.bullQueue = new BullQueue(`{${queueName}}`, {
@ -42,8 +38,10 @@ export class Queue {
// Initialize queue events
this.queueEvents = new QueueEvents(`{${queueName}}`, { connection });
// Start a worker for this queue
this.startWorker();
// Start a worker for this queue unless explicitly disabled
if (options.startWorker !== false) {
this.startWorker();
}
}
/**
@ -100,7 +98,9 @@ export class Queue {
async addBulk(
jobs: Array<{ name: string; data: JobData; opts?: Record<string, unknown> }>
): Promise<Job[]> {
return await this.bullQueue.addBulk(jobs);
const createdJobs = await this.bullQueue.addBulk(jobs);
return createdJobs;
}
/**
@ -185,12 +185,7 @@ export class Queue {
* Start a worker for this queue
*/
private startWorker(): void {
const connection = {
host: this.redisConfig.host,
port: this.redisConfig.port,
password: this.redisConfig.password,
db: this.redisConfig.db,
};
const connection = getRedisConnection(this.redisConfig);
const worker = new Worker(`{${this.queueName}}`, this.processJob.bind(this), {
connection,
@ -222,11 +217,11 @@ export class Queue {
* Process a job
*/
private async processJob(job: Job) {
const { provider, operation, payload }: JobData = job.data;
const { handler, operation, payload }: JobData = job.data;
logger.info('Processing job', {
id: job.id,
provider,
handler,
operation,
queue: this.queueName,
payloadKeys: Object.keys(payload || {}),
@ -240,18 +235,18 @@ export class Queue {
result = await processBatchJob(payload as BatchJobData, this);
} else {
// Regular handler lookup
const handler = providerRegistry.getHandler(provider, operation);
const jobHandler = handlerRegistry.getHandler(handler, operation);
if (!handler) {
throw new Error(`No handler found for ${provider}:${operation}`);
if (!jobHandler) {
throw new Error(`No handler found for ${handler}:${operation}`);
}
result = await handler(payload);
result = await jobHandler(payload);
}
logger.info('Job completed successfully', {
id: job.id,
provider,
handler,
operation,
queue: this.queueName,
});
@ -260,7 +255,7 @@ export class Queue {
} catch (error) {
logger.error('Job processing failed', {
id: job.id,
provider,
handler,
operation,
queue: this.queueName,
error: error instanceof Error ? error.message : String(error),

View file

@ -1,8 +1,11 @@
import { Queue, QueueEvents, Worker, type Job } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import { processBatchJob } from './batch-processor';
import { providerRegistry } from './provider-registry';
import type { JobData, ProviderConfig, ProviderInitializer, QueueConfig } from './types';
import { DeadLetterQueueHandler } from './dlq-handler';
import { handlerRegistry } from './handler-registry';
import { QueueMetricsCollector } from './queue-metrics';
import { QueueRateLimiter, type RateLimitRule } from './rate-limiter';
import type { HandlerConfig, HandlerInitializer, JobData, QueueConfig } from './types';
import { getRedisConnection } from './utils';
const logger = getLogger('queue-manager');
@ -11,8 +14,11 @@ export class QueueManager {
private workers: Worker[] = [];
private queueEvents!: QueueEvents;
private config: Required<QueueConfig>;
private providers: ProviderInitializer[];
private handlers: HandlerInitializer[];
private enableScheduledJobs: boolean;
private dlqHandler?: DeadLetterQueueHandler;
private metricsCollector?: QueueMetricsCollector;
private rateLimiter?: QueueRateLimiter;
private get isInitialized() {
return !!this.queue;
@ -27,18 +33,18 @@ export class QueueManager {
constructor(config: QueueConfig = {}) {
// Enhanced configuration
this.providers = config.providers || [];
this.handlers = config.handlers || [];
this.enableScheduledJobs = config.enableScheduledJobs ?? true;
// Set default configuration
this.config = {
workers: config.workers || parseInt(process.env.WORKER_COUNT || '5'),
concurrency: config.concurrency || parseInt(process.env.WORKER_CONCURRENCY || '20'),
workers: config.workers ?? 5,
concurrency: config.concurrency ?? 20,
redis: {
host: config.redis?.host || process.env.DRAGONFLY_HOST || 'localhost',
port: config.redis?.port || parseInt(process.env.DRAGONFLY_PORT || '6379'),
password: config.redis?.password || process.env.DRAGONFLY_PASSWORD,
db: config.redis?.db || parseInt(process.env.DRAGONFLY_DB || '0'),
host: config.redis?.host || 'localhost',
port: config.redis?.port || 6379,
password: config.redis?.password || '',
db: config.redis?.db || 0,
},
queueName: config.queueName || 'default-queue',
defaultJobOptions: {
@ -51,13 +57,19 @@ export class QueueManager {
},
...config.defaultJobOptions,
},
providers: this.providers,
handlers: this.handlers,
enableScheduledJobs: this.enableScheduledJobs,
enableRateLimit: config.enableRateLimit || false,
globalRateLimit: config.globalRateLimit,
enableDLQ: config.enableDLQ || false,
dlqConfig: config.dlqConfig,
enableMetrics: config.enableMetrics || false,
rateLimitRules: config.rateLimitRules || [],
};
}
/**
* Initialize the queue manager with enhanced provider and scheduled job support
* Initialize the queue manager with enhanced handler and scheduled job support
*/
async initialize(): Promise<void> {
if (this.isInitialized) {
@ -69,13 +81,13 @@ export class QueueManager {
queueName: this.config.queueName,
workers: this.config.workers,
concurrency: this.config.concurrency,
providers: this.providers.length,
handlers: this.handlers.length,
enableScheduledJobs: this.enableScheduledJobs,
});
try {
// Step 1: Register all providers
await this.registerProviders();
// Step 1: Register all handlers
await this.registerHandlers();
// Step 2: Initialize core queue infrastructure
const connection = this.getConnection();
@ -90,15 +102,39 @@ export class QueueManager {
// Initialize queue events
this.queueEvents = new QueueEvents(queueName, { connection });
// Step 3: Start workers
// Wait for queue to be ready
await this.queue.waitUntilReady();
// Step 3: Initialize DLQ handler if enabled
if (this.config.enableDLQ) {
this.dlqHandler = new DeadLetterQueueHandler(this.queue, connection, this.config.dlqConfig);
}
// Step 4: Initialize metrics collector if enabled
if (this.config.enableMetrics) {
this.metricsCollector = new QueueMetricsCollector(this.queue, this.queueEvents);
}
// Step 5: Initialize rate limiter if enabled
if (this.config.enableRateLimit && this.config.rateLimitRules) {
const redis = await this.getRedisClient();
this.rateLimiter = new QueueRateLimiter(redis);
// Add configured rate limit rules
for (const rule of this.config.rateLimitRules) {
this.rateLimiter.addRule(rule);
}
}
// Step 6: Start workers
await this.startWorkers();
// Step 4: Setup event listeners
// Step 7: Setup event listeners
this.setupEventListeners();
// Step 5: Batch cache will be initialized by individual Queue instances
// Step 8: Batch cache will be initialized by individual Queue instances
// Step 6: Set up scheduled jobs
// Step 9: Set up scheduled jobs
if (this.enableScheduledJobs) {
await this.setupScheduledJobs();
}
@ -111,45 +147,45 @@ export class QueueManager {
}
/**
* Register all configured providers
* Register all configured handlers
*/
private async registerProviders(): Promise<void> {
logger.info('Registering queue providers...', { count: this.providers.length });
private async registerHandlers(): Promise<void> {
logger.info('Registering queue handlers...', { count: this.handlers.length });
// Initialize providers using the configured provider initializers
for (const providerInitializer of this.providers) {
// Initialize handlers using the configured handler initializers
for (const handlerInitializer of this.handlers) {
try {
await providerInitializer();
await handlerInitializer();
} catch (error) {
logger.error('Failed to initialize provider', { error });
logger.error('Failed to initialize handler', { error });
throw error;
}
}
// Now register all providers from the registry with the queue manager
const allProviders = providerRegistry.getAllProviders();
for (const [providerName, config] of allProviders) {
this.registerProvider(providerName, config.operations);
logger.info(`Registered provider: ${providerName}`);
// Now register all handlers from the registry with the queue manager
const allHandlers = handlerRegistry.getAllHandlers();
for (const [handlerName, config] of allHandlers) {
this.registerHandler(handlerName, config.operations);
logger.info(`Registered handler: ${handlerName}`);
}
// Log scheduled jobs
const scheduledJobs = providerRegistry.getAllScheduledJobs();
logger.info(`Registered ${scheduledJobs.length} scheduled jobs across all providers`);
for (const { provider, job } of scheduledJobs) {
const scheduledJobs = handlerRegistry.getAllScheduledJobs();
logger.info(`Registered ${scheduledJobs.length} scheduled jobs across all handlers`);
for (const { handler, job } of scheduledJobs) {
logger.info(
`Scheduled job: ${provider}.${job.type} - ${job.description} (${job.cronPattern})`
`Scheduled job: ${handler}.${job.type} - ${job.description} (${job.cronPattern})`
);
}
logger.info('All providers registered successfully');
logger.info('All handlers registered successfully');
}
/**
* Set up scheduled jobs from provider registry
* Set up scheduled jobs from handler registry
*/
private async setupScheduledJobs(): Promise<void> {
const scheduledJobs = providerRegistry.getAllScheduledJobs();
const scheduledJobs = handlerRegistry.getAllScheduledJobs();
if (scheduledJobs.length === 0) {
logger.info('No scheduled jobs found');
@ -158,17 +194,17 @@ export class QueueManager {
logger.info(`Setting up ${scheduledJobs.length} scheduled jobs...`);
for (const { provider, job } of scheduledJobs) {
for (const { handler, job } of scheduledJobs) {
try {
const jobData: JobData = {
type: job.type,
provider,
handler,
operation: job.operation,
payload: job.payload,
priority: job.priority,
};
await this.add(`recurring-${provider}-${job.operation}`, jobData, {
await this.add(`recurring-${handler}-${job.operation}`, jobData, {
repeat: {
pattern: job.cronPattern,
tz: 'UTC',
@ -184,9 +220,9 @@ export class QueueManager {
},
});
logger.info(`Scheduled job registered: ${provider}.${job.type} (${job.cronPattern})`);
logger.info(`Scheduled job registered: ${handler}.${job.type} (${job.cronPattern})`);
} catch (error) {
logger.error(`Failed to register scheduled job: ${provider}.${job.type}`, { error });
logger.error(`Failed to register scheduled job: ${handler}.${job.type}`, { error });
}
}
@ -194,10 +230,10 @@ export class QueueManager {
}
/**
* Register a provider with its operations
* Register a handler with its operations
*/
registerProvider(providerName: string, config: ProviderConfig): void {
providerRegistry.register(providerName, config);
registerHandler(handlerName: string, config: HandlerConfig): void {
handlerRegistry.register(handlerName, config);
}
/**
@ -290,41 +326,142 @@ export class QueueManager {
return this.config.redis;
}
/**
 * Collect a current snapshot of queue metrics.
 * @throws Error when metrics were not enabled via `enableMetrics: true`.
 */
async getMetrics() {
  const collector = this.metricsCollector;
  if (!collector) {
    throw new Error('Metrics not enabled. Set enableMetrics: true in config');
  }
  return collector.collect();
}
/**
* Get metrics report
*/
async getMetricsReport(): Promise<string> {
if (!this.metricsCollector) {
throw new Error('Metrics not enabled. Set enableMetrics: true in config');
}
return this.metricsCollector.getReport();
}
/**
 * Fetch statistics from the dead-letter queue handler.
 * @throws Error when the DLQ was not enabled via `enableDLQ: true`.
 */
async getDLQStats() {
  const dlq = this.dlqHandler;
  if (!dlq) {
    throw new Error('DLQ not enabled. Set enableDLQ: true in config');
  }
  return dlq.getStats();
}
/**
 * Re-enqueue failed jobs from the dead-letter queue.
 * @param limit - Maximum number of DLQ jobs to retry (default 10).
 * @throws Error when the DLQ was not enabled via `enableDLQ: true`.
 */
async retryDLQJobs(limit = 10) {
  const dlq = this.dlqHandler;
  if (!dlq) {
    throw new Error('DLQ not enabled. Set enableDLQ: true in config');
  }
  return dlq.retryDLQJobs(limit);
}
/**
* Add rate limit rule
*/
addRateLimitRule(rule: RateLimitRule): void {
if (!this.rateLimiter) {
throw new Error('Rate limiting not enabled. Set enableRateLimit: true in config');
}
this.rateLimiter.addRule(rule);
}
/**
 * Query the current rate limit status for a handler/operation pair.
 * @throws Error when rate limiting was not enabled via `enableRateLimit: true`.
 */
async getRateLimitStatus(handler: string, operation: string) {
  const limiter = this.rateLimiter;
  if (!limiter) {
    throw new Error('Rate limiting not enabled. Set enableRateLimit: true in config');
  }
  return limiter.getStatus(handler, operation);
}
/**
* Shutdown the queue manager
*/
async shutdown(): Promise<void> {
logger.info('Shutting down queue manager...');
const shutdownTasks: Promise<void>[] = [];
try {
// Shutdown DLQ handler
if (this.dlqHandler) {
shutdownTasks.push(
this.dlqHandler.shutdown().catch(err =>
logger.warn('Error shutting down DLQ handler', { error: err })
)
);
}
// Close workers
await Promise.all(this.workers.map(worker => worker.close()));
this.workers = [];
if (this.workers.length > 0) {
shutdownTasks.push(
Promise.all(
this.workers.map(worker =>
worker.close().catch(err =>
logger.warn('Error closing worker', { error: err })
)
)
).then(() => {
this.workers = [];
})
);
}
// Close queue events
if (this.queueEvents) {
await this.queueEvents.close();
shutdownTasks.push(
this.queueEvents.close().catch(err =>
logger.warn('Error closing queue events', { error: err })
)
);
}
// Close queue
if (this.queue) {
await this.queue.close();
shutdownTasks.push(
this.queue.close().catch(err =>
logger.warn('Error closing queue', { error: err })
)
);
}
// Wait for all shutdown tasks with a timeout
await Promise.race([
Promise.all(shutdownTasks),
new Promise((_, reject) =>
setTimeout(() => reject(new Error('Shutdown timeout')), 5000)
)
]).catch(err => {
logger.warn('Some shutdown tasks did not complete cleanly', { error: err });
});
logger.info('Queue manager shutdown complete');
} catch (error) {
logger.error('Error during queue manager shutdown', { error });
throw error;
// Don't throw in shutdown to avoid hanging tests
}
}
private getConnection() {
return {
host: this.config.redis.host,
port: this.config.redis.port,
password: this.config.redis.password,
db: this.config.redis.db,
};
return getRedisConnection(this.config.redis);
}
// Build a dedicated ioredis client (used by the rate limiter).
// ioredis is loaded lazily via require() so it is only pulled in when
// this code path actually runs — NOTE(review): presumably so the
// dependency is optional when rate limiting is disabled; confirm.
private async getRedisClient() {
// Create a redis client for rate limiting
const Redis = require('ioredis');
return new Redis(this.getConnection());
}
private async startWorkers(): Promise<void> {
@ -359,30 +496,45 @@ export class QueueManager {
}
private async processJob(job: Job) {
const { provider, operation, payload }: JobData = job.data;
const { handler, operation, payload }: JobData = job.data;
logger.info('Processing job', {
id: job.id,
provider,
handler,
operation,
payloadKeys: Object.keys(payload || {}),
});
try {
let result;
// Check rate limits if enabled
if (this.rateLimiter) {
const rateLimit = await this.rateLimiter.checkLimit(handler, operation);
if (!rateLimit.allowed) {
// Reschedule job with delay
const delay = rateLimit.retryAfter || 60000;
logger.warn('Job rate limited, rescheduling', {
id: job.id,
handler,
operation,
retryAfter: delay,
});
// Regular handler lookup
const handler = providerRegistry.getHandler(provider, operation);
if (!handler) {
throw new Error(`No handler found for ${provider}:${operation}`);
throw new Error(`Rate limited. Retry after ${delay}ms`);
}
}
result = await handler(payload);
// Regular handler lookup
const jobHandler = handlerRegistry.getHandler(handler, operation);
if (!jobHandler) {
throw new Error(`No handler found for ${handler}:${operation}`);
}
const result = await jobHandler(payload);
logger.info('Job completed successfully', {
id: job.id,
provider,
handler,
operation,
});
@ -390,10 +542,16 @@ export class QueueManager {
} catch (error) {
logger.error('Job processing failed', {
id: job.id,
provider,
handler,
operation,
error: error instanceof Error ? error.message : String(error),
});
// Handle DLQ if enabled
if (this.dlqHandler && error instanceof Error) {
await this.dlqHandler.handleFailedJob(job, error);
}
throw error;
}
}

View file

@ -0,0 +1,327 @@
import { Queue, QueueEvents } from 'bullmq';
import { getLogger } from '@stock-bot/logger';
import type { Job } from 'bullmq';
const logger = getLogger('queue-metrics');
/**
 * Snapshot of queue state plus derived performance and health indicators,
 * as produced by QueueMetricsCollector.collect().
 */
export interface QueueMetrics {
// Job counts (from BullMQ state queries)
waiting: number;
active: number;
completed: number;
failed: number;
delayed: number;
// BullMQ has no per-job paused count; collectors report the waiting count
// while the queue is paused, otherwise 0.
paused?: number;
// Performance metrics — processing durations in milliseconds
processingTime: {
avg: number;
min: number;
max: number;
p95: number;
p99: number;
};
// Throughput over the last minute
throughput: {
completedPerMinute: number;
failedPerMinute: number;
totalPerMinute: number;
};
// Job age — enqueue timestamp of the oldest waiting job, or null if none
oldestWaitingJob: Date | null;
// Health — isHealthy is true iff healthIssues is empty
isHealthy: boolean;
healthIssues: string[];
}
export class QueueMetricsCollector {
  /** Rolling sample of per-job processing durations in ms (newest last). */
  private processingTimes: number[] = [];
  /** Completion event timestamps within the last metricsInterval. */
  private completedTimestamps: number[] = [];
  /** Failure event timestamps within the last metricsInterval. */
  private failedTimestamps: number[] = [];
  /**
   * Job start times keyed by job id. This state must live on the collector:
   * queue.getJob() returns a fresh Job instance on every call, so a value
   * attached to one instance (the previous `_startTime` approach) was never
   * visible on a later lookup and processing times were never recorded.
   */
  private jobStartTimes = new Map<string, number>();
  private readonly maxSamples = 1000;
  private readonly metricsInterval = 60000; // 1 minute

  constructor(
    private queue: Queue,
    private queueEvents: QueueEvents
  ) {
    this.setupEventListeners();
  }

  /**
   * Setup event listeners for metrics collection.
   */
  private setupEventListeners(): void {
    // Remember when each job started so its duration can be computed later.
    this.queueEvents.on('active', ({ jobId }) => {
      this.jobStartTimes.set(jobId, Date.now());
    });

    this.queueEvents.on('completed', ({ jobId }) => {
      const now = Date.now();
      // Record completion for throughput
      this.completedTimestamps.push(now);
      this.cleanupOldTimestamps();
      // Record processing time if we observed the job becoming active
      const startedAt = this.jobStartTimes.get(jobId);
      if (startedAt !== undefined) {
        this.recordProcessingTime(now - startedAt);
        this.jobStartTimes.delete(jobId);
      }
    });

    this.queueEvents.on('failed', ({ jobId }) => {
      // Record failure for throughput
      this.failedTimestamps.push(Date.now());
      this.cleanupOldTimestamps();
      // Discard the start time so the map cannot grow without bound.
      this.jobStartTimes.delete(jobId);
    });
  }

  /**
   * Record a processing time sample, keeping at most maxSamples entries.
   */
  private recordProcessingTime(time: number): void {
    this.processingTimes.push(time);
    // Keep only recent samples
    if (this.processingTimes.length > this.maxSamples) {
      this.processingTimes = this.processingTimes.slice(-this.maxSamples);
    }
  }

  /**
   * Drop throughput timestamps older than the metrics interval.
   */
  private cleanupOldTimestamps(): void {
    const cutoff = Date.now() - this.metricsInterval;
    this.completedTimestamps = this.completedTimestamps.filter(ts => ts > cutoff);
    this.failedTimestamps = this.failedTimestamps.filter(ts => ts > cutoff);
  }

  /**
   * Collect current metrics.
   */
  async collect(): Promise<QueueMetrics> {
    // Get job counts
    const [waiting, active, completed, failed, delayed] = await Promise.all([
      this.queue.getWaitingCount(),
      this.queue.getActiveCount(),
      this.queue.getCompletedCount(),
      this.queue.getFailedCount(),
      this.queue.getDelayedCount(),
    ]);

    // BullMQ doesn't have getPausedCount; report the waiting count while paused.
    const paused = (await this.queue.isPaused()) ? waiting : 0;

    // Calculate processing time metrics
    const processingTime = this.calculateProcessingTimeMetrics();

    // Calculate throughput
    const throughput = this.calculateThroughput();

    // Get oldest waiting job
    const oldestWaitingJob = await this.getOldestWaitingJob();

    // Check health
    const { isHealthy, healthIssues } = this.checkHealth({
      waiting,
      active,
      failed,
      processingTime,
    });

    return {
      waiting,
      active,
      completed,
      failed,
      delayed,
      paused,
      processingTime,
      throughput,
      oldestWaitingJob,
      isHealthy,
      healthIssues,
    };
  }

  /**
   * Calculate avg/min/max/p95/p99 over the recorded processing time samples.
   * Returns all zeros when no samples exist yet.
   */
  private calculateProcessingTimeMetrics(): QueueMetrics['processingTime'] {
    if (this.processingTimes.length === 0) {
      return { avg: 0, min: 0, max: 0, p95: 0, p99: 0 };
    }
    const sorted = [...this.processingTimes].sort((a, b) => a - b);
    const sum = sorted.reduce((acc, val) => acc + val, 0);
    return {
      avg: Math.round(sum / sorted.length),
      min: sorted[0],
      max: sorted[sorted.length - 1],
      p95: sorted[Math.floor(sorted.length * 0.95)],
      p99: sorted[Math.floor(sorted.length * 0.99)],
    };
  }

  /**
   * Calculate jobs completed/failed within the last 60 seconds.
   */
  private calculateThroughput(): QueueMetrics['throughput'] {
    const now = Date.now();
    const oneMinuteAgo = now - 60000;
    const completedPerMinute = this.completedTimestamps.filter(ts => ts > oneMinuteAgo).length;
    const failedPerMinute = this.failedTimestamps.filter(ts => ts > oneMinuteAgo).length;
    return {
      completedPerMinute,
      failedPerMinute,
      totalPerMinute: completedPerMinute + failedPerMinute,
    };
  }

  /**
   * Get the enqueue time of the oldest waiting job, or null if none waiting.
   */
  private async getOldestWaitingJob(): Promise<Date | null> {
    const waitingJobs = await this.queue.getWaiting(0, 1);
    if (waitingJobs.length > 0) {
      return new Date(waitingJobs[0].timestamp);
    }
    return null;
  }

  /**
   * Check queue health against simple heuristics and list any issues found.
   */
  private checkHealth(metrics: {
    waiting: number;
    active: number;
    failed: number;
    processingTime: QueueMetrics['processingTime'];
  }): { isHealthy: boolean; healthIssues: string[] } {
    const issues: string[] = [];

    // Check for high failure rate (guard the denominator: 0/0 would be NaN
    // and silently skip the check forever on a fresh collector).
    const finished = metrics.failed + this.completedTimestamps.length;
    const failureRate = finished > 0 ? metrics.failed / finished : 0;
    if (failureRate > 0.1) {
      issues.push(`High failure rate: ${(failureRate * 100).toFixed(1)}%`);
    }

    // Check for queue backlog
    if (metrics.waiting > 1000) {
      issues.push(`Large queue backlog: ${metrics.waiting} jobs waiting`);
    }

    // Check for slow processing
    if (metrics.processingTime.avg > 30000) { // 30 seconds
      issues.push(`Slow average processing time: ${(metrics.processingTime.avg / 1000).toFixed(1)}s`);
    }

    // Check for stalled active jobs
    if (metrics.active > 100) {
      issues.push(`High number of active jobs: ${metrics.active}`);
    }

    return {
      isHealthy: issues.length === 0,
      healthIssues: issues,
    };
  }

  /**
   * Get formatted metrics report
   */
  async getReport(): Promise<string> {
    const metrics = await this.collect();
    return `
Queue Metrics Report
===================
Status: ${metrics.isHealthy ? '✅ Healthy' : '⚠️ Issues Detected'}
Job Counts:
- Waiting: ${metrics.waiting}
- Active: ${metrics.active}
- Completed: ${metrics.completed}
- Failed: ${metrics.failed}
- Delayed: ${metrics.delayed}
- Paused: ${metrics.paused}
Performance:
- Avg Processing Time: ${(metrics.processingTime.avg / 1000).toFixed(2)}s
- Min/Max: ${(metrics.processingTime.min / 1000).toFixed(2)}s / ${(metrics.processingTime.max / 1000).toFixed(2)}s
- P95/P99: ${(metrics.processingTime.p95 / 1000).toFixed(2)}s / ${(metrics.processingTime.p99 / 1000).toFixed(2)}s
Throughput:
- Completed/min: ${metrics.throughput.completedPerMinute}
- Failed/min: ${metrics.throughput.failedPerMinute}
- Total/min: ${metrics.throughput.totalPerMinute}
${metrics.oldestWaitingJob ? `Oldest Waiting Job: ${metrics.oldestWaitingJob.toISOString()}` : 'No waiting jobs'}
${metrics.healthIssues.length > 0 ? `\nHealth Issues:\n${metrics.healthIssues.map(issue => `- ${issue}`).join('\n')}` : ''}
`.trim();
  }

  /**
   * Export metrics in Prometheus format
   */
  async getPrometheusMetrics(): Promise<string> {
    const metrics = await this.collect();
    const queueName = this.queue.name;
    return `
# HELP queue_jobs_total Total number of jobs by status
# TYPE queue_jobs_total gauge
queue_jobs_total{queue="${queueName}",status="waiting"} ${metrics.waiting}
queue_jobs_total{queue="${queueName}",status="active"} ${metrics.active}
queue_jobs_total{queue="${queueName}",status="completed"} ${metrics.completed}
queue_jobs_total{queue="${queueName}",status="failed"} ${metrics.failed}
queue_jobs_total{queue="${queueName}",status="delayed"} ${metrics.delayed}
queue_jobs_total{queue="${queueName}",status="paused"} ${metrics.paused}
# HELP queue_processing_time_seconds Job processing time in seconds
# TYPE queue_processing_time_seconds summary
queue_processing_time_seconds{queue="${queueName}",quantile="0.5"} ${(metrics.processingTime.avg / 1000).toFixed(3)}
queue_processing_time_seconds{queue="${queueName}",quantile="0.95"} ${(metrics.processingTime.p95 / 1000).toFixed(3)}
queue_processing_time_seconds{queue="${queueName}",quantile="0.99"} ${(metrics.processingTime.p99 / 1000).toFixed(3)}
queue_processing_time_seconds_sum{queue="${queueName}"} ${(metrics.processingTime.avg * this.processingTimes.length / 1000).toFixed(3)}
queue_processing_time_seconds_count{queue="${queueName}"} ${this.processingTimes.length}
# HELP queue_throughput_per_minute Jobs processed per minute
# TYPE queue_throughput_per_minute gauge
queue_throughput_per_minute{queue="${queueName}",status="completed"} ${metrics.throughput.completedPerMinute}
queue_throughput_per_minute{queue="${queueName}",status="failed"} ${metrics.throughput.failedPerMinute}
queue_throughput_per_minute{queue="${queueName}",status="total"} ${metrics.throughput.totalPerMinute}
# HELP queue_health Queue health status
# TYPE queue_health gauge
queue_health{queue="${queueName}"} ${metrics.isHealthy ? 1 : 0}
`.trim();
  }
}

View file

@ -0,0 +1,295 @@
import { RateLimiterRedis, RateLimiterRes } from 'rate-limiter-flexible';
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('rate-limiter');
/** Token-bucket style limit definition backed by rate-limiter-flexible. */
export interface RateLimitConfig {
points: number; // Number of requests
duration: number; // Per duration in seconds
blockDuration?: number; // Block duration in seconds
keyPrefix?: string; // Redis key prefix; defaults to `rl:<rule key>`
}
/**
 * A limit rule scoped either globally, per handler, or per
 * handler+operation pair. `handler`/`operation` are required only for
 * the corresponding levels.
 */
export interface RateLimitRule {
level: 'global' | 'handler' | 'operation';
handler?: string;
operation?: string;
config: RateLimitConfig;
}
export class QueueRateLimiter {
  // One RateLimiterRedis per rule, keyed by getRuleKey(level, handler, operation).
  private limiters = new Map<string, RateLimiterRedis>();
  private rules: RateLimitRule[] = [];

  constructor(private redisClient: any) {}

  /**
   * Add a rate limit rule and create its backing Redis limiter.
   */
  addRule(rule: RateLimitRule): void {
    this.rules.push(rule);
    const key = this.getRuleKey(rule.level, rule.handler, rule.operation);
    const limiter = new RateLimiterRedis({
      storeClient: this.redisClient,
      keyPrefix: rule.config.keyPrefix || `rl:${key}`,
      points: rule.config.points,
      duration: rule.config.duration,
      blockDuration: rule.config.blockDuration || 0,
    });
    this.limiters.set(key, limiter);
    logger.info('Rate limit rule added', {
      level: rule.level,
      handler: rule.handler,
      operation: rule.operation,
      points: rule.config.points,
      duration: rule.config.duration,
    });
  }

  /**
   * Check if a job can be processed based on rate limits.
   * NOTE(review): a point is consumed from every applicable limiter even
   * when another limiter blocks the request, so a blocked job still spends
   * a point on the permissive limiters — confirm this is intended.
   */
  async checkLimit(handler: string, operation: string): Promise<{
    allowed: boolean;
    retryAfter?: number;
    remainingPoints?: number;
  }> {
    const limiters = this.getApplicableLimiters(handler, operation);
    if (limiters.length === 0) {
      return { allowed: true };
    }
    try {
      // Check all applicable rate limiters
      const results = await Promise.all(
        limiters.map(({ limiter, key }) => this.consumePoint(limiter, key))
      );
      // All limiters must allow the request
      const blocked = results.find(r => !r.allowed);
      if (blocked) {
        return blocked;
      }
      // Return the most restrictive remaining points
      const minRemainingPoints = Math.min(...results.map(r => r.remainingPoints || Infinity));
      return {
        allowed: true,
        remainingPoints: minRemainingPoints === Infinity ? undefined : minRemainingPoints,
      };
    } catch (error) {
      logger.error('Rate limit check failed', { handler, operation, error });
      // On error (e.g. Redis unavailable), fail open: allow the request.
      return { allowed: true };
    }
  }

  /**
   * Consume a point from the rate limiter.
   * Rejections carrying a RateLimiterRes indicate "limit exceeded"; any
   * other rejection is rethrown to the caller.
   */
  private async consumePoint(
    limiter: RateLimiterRedis,
    key: string
  ): Promise<{ allowed: boolean; retryAfter?: number; remainingPoints?: number }> {
    try {
      const result = await limiter.consume(key);
      return {
        allowed: true,
        remainingPoints: result.remainingPoints,
      };
    } catch (rejRes) {
      if (rejRes instanceof RateLimiterRes) {
        logger.warn('Rate limit exceeded', {
          key,
          retryAfter: rejRes.msBeforeNext,
        });
        return {
          allowed: false,
          retryAfter: rejRes.msBeforeNext,
          remainingPoints: rejRes.remainingPoints,
        };
      }
      throw rejRes;
    }
  }

  /**
   * Get applicable rate limiters (and their consumer keys) for a
   * handler/operation pair. Global rules always apply; handler rules apply
   * when the handler matches; operation rules require both to match.
   */
  private getApplicableLimiters(handler: string, operation: string): Array<{ limiter: RateLimiterRedis; key: string }> {
    const applicable: Array<{ limiter: RateLimiterRedis; key: string }> = [];
    for (const rule of this.rules) {
      let applies = false;
      let consumerKey = '';
      switch (rule.level) {
        case 'global':
          // Global limit applies to all
          applies = true;
          consumerKey = 'global';
          break;
        case 'handler':
          // Handler limit applies if handler matches
          if (rule.handler === handler) {
            applies = true;
            consumerKey = handler;
          }
          break;
        case 'operation':
          // Operation limit applies if both handler and operation match
          if (rule.handler === handler && rule.operation === operation) {
            applies = true;
            consumerKey = `${handler}:${operation}`;
          }
          break;
      }
      if (applies) {
        const ruleKey = this.getRuleKey(rule.level, rule.handler, rule.operation);
        const limiter = this.limiters.get(ruleKey);
        if (limiter) {
          applicable.push({ limiter, key: consumerKey });
        }
      }
    }
    return applicable;
  }

  /**
   * Build the canonical key a rule's limiter is stored under.
   */
  private getRuleKey(level: string, handler?: string, operation?: string): string {
    switch (level) {
      case 'global':
        return 'global';
      case 'handler':
        return `handler:${handler}`;
      case 'operation':
        return `operation:${handler}:${operation}`;
      default:
        return level;
    }
  }

  /**
   * Get current rate limit status for a handler/operation.
   * Returns one entry per applicable rule with remaining points and reset time.
   */
  async getStatus(handler: string, operation: string): Promise<{
    handler: string;
    operation: string;
    limits: Array<{
      level: string;
      points: number;
      duration: number;
      remaining: number;
      resetIn: number;
    }>;
  }> {
    const applicable = this.getApplicableLimiters(handler, operation);
    const limits = await Promise.all(
      applicable.map(async ({ limiter, key }) => {
        const rule = this.rules.find(r => {
          const ruleKey = this.getRuleKey(r.level, r.handler, r.operation);
          return this.limiters.get(ruleKey) === limiter;
        });
        try {
          const result = await limiter.get(key);
          if (!result) {
            // No consumption recorded yet: full allowance remains.
            return {
              level: rule?.level || 'unknown',
              points: limiter.points,
              duration: limiter.duration,
              remaining: limiter.points,
              resetIn: 0,
            };
          }
          return {
            level: rule?.level || 'unknown',
            points: limiter.points,
            duration: limiter.duration,
            remaining: result.remainingPoints,
            resetIn: result.msBeforeNext,
          };
        } catch (error) {
          // On lookup failure report zero remaining rather than throwing.
          return {
            level: rule?.level || 'unknown',
            points: limiter.points,
            duration: limiter.duration,
            remaining: 0,
            resetIn: 0,
          };
        }
      })
    );
    return {
      handler,
      operation,
      limits,
    };
  }

  /**
   * Reset rate limits for a handler/operation.
   * With no operation given, resets every rule matching the handler.
   */
  async reset(handler: string, operation?: string): Promise<void> {
    const applicable = operation
      ? this.getApplicableLimiters(handler, operation)
      : this.rules
          .filter(r => !handler || r.handler === handler)
          .map(r => {
            const key = this.getRuleKey(r.level, r.handler, r.operation);
            const limiter = this.limiters.get(key);
            return limiter ? { limiter, key: handler || 'global' } : null;
          })
          .filter(Boolean) as Array<{ limiter: RateLimiterRedis; key: string }>;
    await Promise.all(
      applicable.map(({ limiter, key }) => limiter.delete(key))
    );
    logger.info('Rate limits reset', { handler, operation });
  }

  /**
   * Get all configured rate limit rules (defensive copy).
   */
  getRules(): RateLimitRule[] {
    return [...this.rules];
  }

  /**
   * Remove a rate limit rule.
   * @returns true when a matching rule was found and removed.
   */
  removeRule(level: string, handler?: string, operation?: string): boolean {
    const ruleIndex = this.rules.findIndex(r =>
      r.level === level &&
      (!handler || r.handler === handler) &&
      (!operation || r.operation === operation)
    );
    if (ruleIndex >= 0) {
      // Derive the limiter key from the matched rule, not from the raw
      // arguments: the lookup above allows omitting handler/operation, in
      // which case a key built from the arguments would not match the key
      // the limiter was stored under and the limiter would leak.
      const matched = this.rules[ruleIndex];
      const key = this.getRuleKey(matched.level, matched.handler, matched.operation);
      this.rules.splice(ruleIndex, 1);
      this.limiters.delete(key);
      logger.info('Rate limit rule removed', { level, handler, operation });
      return true;
    }
    return false;
  }
}

View file

@ -1,7 +1,7 @@
// Types for queue operations
export interface JobData {
type?: string;
provider: string;
handler: string;
operation: string;
payload: any;
priority?: number;
@ -17,7 +17,7 @@ export interface ProcessOptions {
removeOnComplete?: number;
removeOnFail?: number;
// Job routing information
provider?: string;
handler?: string;
operation?: string;
// Optional queue for overloaded function signatures
queue?: any; // QueueManager reference
@ -50,8 +50,15 @@ export interface QueueConfig {
delay: number;
};
};
providers?: ProviderInitializer[];
handlers?: HandlerInitializer[];
enableScheduledJobs?: boolean;
// Rate limiting
enableRateLimit?: boolean;
globalRateLimit?: RateLimitConfig;
enableDLQ?: boolean;
dlqConfig?: DLQConfig;
enableMetrics?: boolean;
rateLimitRules?: RateLimitRule[];
}
export interface JobHandler {
@ -69,14 +76,17 @@ export interface ScheduledJob {
delay?: number;
}
export interface ProviderConfig {
export interface HandlerConfig {
[operation: string]: JobHandler;
}
export interface ProviderConfigWithSchedule {
export interface HandlerConfigWithSchedule {
name: string;
operations: Record<string, JobHandler>;
scheduledJobs?: ScheduledJob[];
// Rate limiting
rateLimit?: RateLimitConfig;
operationLimits?: Record<string, RateLimitConfig>;
}
export interface BatchJobData {
@ -86,6 +96,28 @@ export interface BatchJobData {
itemCount: number;
}
export interface ProviderInitializer {
export interface HandlerInitializer {
(): void | Promise<void>;
}
// Rate limiting types
// Mirrors the rate limiter's config: allow `points` requests per
// `duration` seconds, optionally blocking for `blockDuration` seconds
// once exceeded.
export interface RateLimitConfig {
points: number;
duration: number;
blockDuration?: number;
}
// A limit scoped globally, per handler, or per handler+operation;
// handler/operation are only meaningful for the matching level.
export interface RateLimitRule {
level: 'global' | 'handler' | 'operation';
handler?: string;
operation?: string;
config: RateLimitConfig;
}
// DLQ types
// NOTE(review): field semantics assumed from names (retryDelay/cleanupAge
// presumably in ms); the DLQ handler implementation is not in view — confirm.
export interface DLQConfig {
maxRetries?: number;
retryDelay?: number;
alertThreshold?: number;
cleanupAge?: number;
}

31
libs/queue/src/utils.ts Normal file
View file

@ -0,0 +1,31 @@
/**
 * Build a BullMQ/ioredis-compatible Redis connection configuration with
 * environment-aware timeout and retry behaviour.
 */
export function getRedisConnection(config: {
  host: string;
  port: number;
  password?: string;
  db?: number;
}) {
  // Test runs get shorter timeouts and fewer retries so failures surface fast.
  const isTest = process.env.NODE_ENV === 'test' || process.env.BUNIT === '1';
  const maxRetries = isTest ? 1 : 3;

  // Returning null tells ioredis to give up instead of retrying again.
  const retryStrategy = (times: number): number | null => {
    if (times > maxRetries) {
      return null;
    }
    return isTest ? 100 : Math.min(times * 100, 3000);
  };

  return {
    host: config.host,
    port: config.port,
    password: config.password,
    db: config.db,
    maxRetriesPerRequest: null, // Required by BullMQ
    enableReadyCheck: false,
    connectTimeout: isTest ? 1000 : 3000,
    lazyConnect: true,
    keepAlive: false, // NOTE(review): ioredis types keepAlive as a number (ms) — confirm false is intended
    retryStrategy,
  };
}

View file

@ -1,85 +0,0 @@
#!/usr/bin/env bun
// Quick test of the simplified API
import { initializeBatchCache, processItems, QueueManager } from './src/index.js';
async function testSimplifiedAPI() {
console.log('🚀 Testing simplified queue API...');
// Create queue manager
const queueManager = new QueueManager({
queueName: 'di2',
workers: 2,
concurrency: 2,
});
// Register a simple provider
queueManager.registerProvider('test-provider', {
'process-item': async payload => {
console.log(`✅ Processing item: ${JSON.stringify(payload)}`);
await new Promise(resolve => setTimeout(resolve, 100));
return { processed: true, originalData: payload };
},
});
try {
await queueManager.initialize();
await initializeBatchCache(queueManager);
console.log('📋 Testing with simple array...');
// Test 1: Simple array of numbers
const numbers = [1, 2, 3, 4, 5];
const result1 = await processItems(numbers, queueManager, {
totalDelayHours: 0.0014, // ~5 seconds (5/3600 hours)
useBatching: false,
provider: 'test-provider',
operation: 'process-item',
});
console.log('🎯 Numbers result:', result1);
// Test 2: Array of objects
const objects = [
{ id: 1, name: 'Item 1' },
{ id: 2, name: 'Item 2' },
{ id: 3, name: 'Item 3' },
];
const result2 = await processItems(objects, queueManager, {
totalDelayHours: 0.0014, // ~5 seconds
useBatching: true,
batchSize: 2,
provider: 'test-provider',
operation: 'process-item',
});
console.log('🎯 Objects result:', result2);
// Test 3: Array of strings (symbols)
const symbols = Array.from({ length: 1000 }, (_, i) => `Symbol-${i + 1}`);
console.log('📋 Testing with symbols...');
const result3 = await processItems(symbols, queueManager, {
totalDelayHours: 0.0008, // ~3 seconds
useBatching: true,
batchSize: 1,
provider: 'test-provider',
operation: 'process-item',
});
console.log('🎯 Symbols result:', result3);
console.log('✨ All tests completed successfully!');
console.log('🏆 The simplified API is working correctly!');
} catch (error) {
console.error('❌ Test failed:', error);
} finally {
// Clean shutdown
setTimeout(async () => {
await queueManager.shutdown();
console.log('🔄 Shutdown complete');
process.exit(0);
}, 10000000);
}
}
testSimplifiedAPI().catch(console.error);

View file

@ -0,0 +1,354 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { QueueManager, Queue, handlerRegistry, processItems, initializeBatchCache } from '../src';
// Swallow the Redis teardown rejections BullMQ emits during test cleanup;
// surface every other unhandled rejection so real bugs remain visible.
process.on('unhandledRejection', (reason, promise) => {
  const expectedMessages = [
    'Connection is closed',
    'Connection is in monitoring mode',
  ];
  if (reason && typeof reason === 'object' && 'message' in reason) {
    const { message } = reason as Error;
    if (expectedMessages.some(snippet => message.includes(snippet))) {
      return;
    }
  }
  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
});
describe('Batch Processor', () => {
let queueManager: QueueManager;
let queue: Queue;
const redisConfig = {
host: 'localhost',
port: 6379,
password: '',
db: 0,
};
beforeEach(async () => {
// Clear handler registry
handlerRegistry.clear();
// Register test handler
handlerRegistry.register('batch-test', {
'process-item': async (payload) => {
return { processed: true, data: payload };
},
'generic': async (payload) => {
return { processed: true, data: payload };
},
'process-batch-items': async (batchData) => {
// This is called by the batch processor internally
return { batchProcessed: true };
},
});
// Use unique queue name per test to avoid conflicts
const uniqueQueueName = `batch-test-queue-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
// Initialize queue manager with no workers to prevent immediate processing
queueManager = new QueueManager({
queueName: uniqueQueueName,
redis: redisConfig,
workers: 0, // No workers in tests
concurrency: 5,
});
await queueManager.initialize();
// Create Queue instance without worker to prevent immediate job processing
queue = new Queue(queueManager.getQueueName(), queueManager.getRedisConfig(), { startWorker: false });
await initializeBatchCache(queue);
// Ensure completely clean state - wait for queue to be ready first
await queue.getBullQueue().waitUntilReady();
// Clear all job states
await queue.getBullQueue().drain(true);
await queue.getBullQueue().clean(0, 1000, 'completed');
await queue.getBullQueue().clean(0, 1000, 'failed');
await queue.getBullQueue().clean(0, 1000, 'active');
await queue.getBullQueue().clean(0, 1000, 'waiting');
await queue.getBullQueue().clean(0, 1000, 'delayed');
// Add a small delay to ensure cleanup is complete
await new Promise(resolve => setTimeout(resolve, 50));
});
afterEach(async () => {
try {
// Clean up jobs first
if (queue) {
try {
await queue.getBullQueue().drain(true);
await queue.getBullQueue().clean(0, 1000, 'completed');
await queue.getBullQueue().clean(0, 1000, 'failed');
await queue.getBullQueue().clean(0, 1000, 'active');
await queue.getBullQueue().clean(0, 1000, 'waiting');
await queue.getBullQueue().clean(0, 1000, 'delayed');
} catch (error) {
// Ignore cleanup errors
}
await queue.shutdown();
}
if (queueManager) {
await Promise.race([
queueManager.shutdown(),
new Promise((_, reject) =>
setTimeout(() => reject(new Error('Shutdown timeout')), 3000)
)
]);
}
} catch (error) {
console.warn('Cleanup error:', error.message);
} finally {
handlerRegistry.clear();
await new Promise(resolve => setTimeout(resolve, 100));
}
});
describe('Direct Processing', () => {
test('should process items directly without batching', async () => {
const items = ['item1', 'item2', 'item3', 'item4', 'item5'];
const result = await processItems(items, queue, {
totalDelayHours: 0.001, // 3.6 seconds total
useBatching: false,
handler: 'batch-test',
operation: 'process-item',
priority: 1,
});
expect(result.mode).toBe('direct');
expect(result.totalItems).toBe(5);
expect(result.jobsCreated).toBe(5);
// Verify jobs were created - BullMQ has an issue where job ID "1" doesn't show up in state queries
// but exists when queried directly, so we need to check both ways
const [delayedJobs, waitingJobs, activeJobs, completedJobs, failedJobs, job1] = await Promise.all([
queue.getBullQueue().getJobs(['delayed']),
queue.getBullQueue().getJobs(['waiting']),
queue.getBullQueue().getJobs(['active']),
queue.getBullQueue().getJobs(['completed']),
queue.getBullQueue().getJobs(['failed']),
queue.getBullQueue().getJob('1'), // Job 1 often doesn't show up in state queries
]);
const jobs = [...delayedJobs, ...waitingJobs, ...activeJobs, ...completedJobs, ...failedJobs];
const ourJobs = jobs.filter(j => j.name === 'process-item' && j.data.handler === 'batch-test');
// Include job 1 if we found it directly but it wasn't in the state queries
if (job1 && job1.name === 'process-item' && job1.data.handler === 'batch-test' && !ourJobs.find(j => j.id === '1')) {
ourJobs.push(job1);
}
expect(ourJobs.length).toBe(5);
// Check delays are distributed
const delays = ourJobs.map(j => j.opts.delay || 0).sort((a, b) => a - b);
expect(delays[0]).toBe(0);
expect(delays[4]).toBeGreaterThan(delays[0]);
});
test('should process complex objects directly', async () => {
const items = [
{ id: 1, name: 'Product A', price: 100 },
{ id: 2, name: 'Product B', price: 200 },
{ id: 3, name: 'Product C', price: 300 },
];
const result = await processItems(items, queue, {
totalDelayHours: 0.001,
useBatching: false,
handler: 'batch-test',
operation: 'process-item',
});
expect(result.jobsCreated).toBe(3);
// Check job payloads
const jobs = await queue.getBullQueue().getJobs(['waiting', 'delayed']);
const ourJobs = jobs.filter(j => j.name === 'process-item' && j.data.handler === 'batch-test');
const payloads = ourJobs.map(j => j.data.payload);
expect(payloads).toContainEqual({ id: 1, name: 'Product A', price: 100 });
expect(payloads).toContainEqual({ id: 2, name: 'Product B', price: 200 });
expect(payloads).toContainEqual({ id: 3, name: 'Product C', price: 300 });
});
});
// Exercises the batch mode of processItems(): items are grouped into
// fixed-size batches and one 'process-batch' job is queued per batch,
// instead of one job per item.
describe('Batch Processing', () => {
  test('should process items in batches', async () => {
    const items = Array.from({ length: 50 }, (_, i) => ({ id: i, value: `item-${i}` }));
    const result = await processItems(items, queue, {
      totalDelayHours: 0.001,
      useBatching: true,
      batchSize: 10,
      handler: 'batch-test',
      operation: 'process-item',
    });
    expect(result.mode).toBe('batch');
    expect(result.totalItems).toBe(50);
    expect(result.batchesCreated).toBe(5); // 50 items / 10 per batch
    expect(result.jobsCreated).toBe(5); // 5 batch jobs
    // Verify batch jobs were created
    const jobs = await queue.getBullQueue().getJobs(['delayed', 'waiting']);
    const batchJobs = jobs.filter(j => j.name === 'process-batch');
    expect(batchJobs.length).toBe(5);
  });
  test('should handle different batch sizes', async () => {
    // Non-divisible count: the final partial batch still becomes a job.
    const items = Array.from({ length: 23 }, (_, i) => i);
    const result = await processItems(items, queue, {
      totalDelayHours: 0.001,
      useBatching: true,
      batchSize: 7,
      handler: 'batch-test',
      operation: 'process-item',
    });
    expect(result.batchesCreated).toBe(4); // 23/7 = 3.28, rounded up to 4
    expect(result.jobsCreated).toBe(4);
  });
  test('should store batch payloads in cache', async () => {
    const items = [
      { type: 'A', data: 'test1' },
      { type: 'B', data: 'test2' },
    ];
    const result = await processItems(items, queue, {
      totalDelayHours: 0.001,
      useBatching: true,
      batchSize: 2,
      handler: 'batch-test',
      operation: 'process-item',
      ttl: 3600, // 1 hour TTL
    });
    expect(result.jobsCreated).toBe(1);
    // Get the batch job
    const jobs = await queue.getBullQueue().getJobs(['waiting', 'delayed']);
    expect(jobs.length).toBe(1);
    const batchJob = jobs[0];
    // The batch job carries a cache reference (payloadKey) plus a count,
    // rather than the items themselves — presumably the worker resolves
    // payloadKey back into the item list; confirm against the cache impl.
    expect(batchJob.data.payload.payloadKey).toBeDefined();
    expect(batchJob.data.payload.itemCount).toBe(2);
  });
});
describe('Empty and Edge Cases', () => {
test('should handle empty item list', async () => {
const result = await processItems([], queue, {
totalDelayHours: 1,
handler: 'batch-test',
operation: 'process-item',
});
expect(result.totalItems).toBe(0);
expect(result.jobsCreated).toBe(0);
expect(result.duration).toBeDefined();
});
test('should handle single item', async () => {
const result = await processItems(['single-item'], queue, {
totalDelayHours: 0.001,
handler: 'batch-test',
operation: 'process-item',
});
expect(result.totalItems).toBe(1);
expect(result.jobsCreated).toBe(1);
});
test('should handle large batch with delays', async () => {
const items = Array.from({ length: 100 }, (_, i) => ({ index: i }));
const result = await processItems(items, queue, {
totalDelayHours: 0.01, // 36 seconds total
useBatching: true,
batchSize: 25,
handler: 'batch-test',
operation: 'process-item',
});
expect(result.batchesCreated).toBe(4); // 100/25
expect(result.jobsCreated).toBe(4);
// Check delays are distributed
const jobs = await queue.getBullQueue().getJobs(['delayed', 'waiting']);
const delays = jobs.map(j => j.opts.delay || 0).sort((a, b) => a - b);
expect(delays[0]).toBe(0); // First batch has no delay
expect(delays[3]).toBeGreaterThan(0); // Last batch has delay
});
});
describe('Job Options', () => {
test('should respect custom job options', async () => {
const items = ['a', 'b', 'c'];
await processItems(items, queue, {
totalDelayHours: 0,
handler: 'batch-test',
operation: 'process-item',
priority: 5,
retries: 10,
removeOnComplete: 100,
removeOnFail: 50,
});
// Check all states including job ID "1" specifically (as it often doesn't show up in state queries)
const [waitingJobs, delayedJobs, job1, job2, job3] = await Promise.all([
queue.getBullQueue().getJobs(['waiting']),
queue.getBullQueue().getJobs(['delayed']),
queue.getBullQueue().getJob('1'),
queue.getBullQueue().getJob('2'),
queue.getBullQueue().getJob('3'),
]);
const jobs = [...waitingJobs, ...delayedJobs];
// Add any missing jobs that exist but don't show up in state queries
[job1, job2, job3].forEach(job => {
if (job && !jobs.find(j => j.id === job.id)) {
jobs.push(job);
}
});
expect(jobs.length).toBe(3);
jobs.forEach(job => {
expect(job.opts.priority).toBe(5);
expect(job.opts.attempts).toBe(10);
expect(job.opts.removeOnComplete).toBe(100);
expect(job.opts.removeOnFail).toBe(50);
});
});
test('should set handler and operation correctly', async () => {
// Register custom handler for this test
handlerRegistry.register('custom-handler', {
'custom-operation': async (payload) => {
return { processed: true, data: payload };
},
});
await processItems(['test'], queue, {
totalDelayHours: 0,
handler: 'custom-handler',
operation: 'custom-operation',
});
const jobs = await queue.getBullQueue().getJobs(['waiting']);
expect(jobs.length).toBe(1);
expect(jobs[0].data.handler).toBe('custom-handler');
expect(jobs[0].data.operation).toBe('custom-operation');
});
});
});

View file

@ -0,0 +1,357 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { Queue, Worker, Job } from 'bullmq';
import { DeadLetterQueueHandler } from '../src/dlq-handler';
import { getRedisConnection } from '../src/utils';
// Swallow the noisy Redis disconnect rejections that fire during test
// teardown; anything else is still surfaced so real failures stay visible.
const IGNORED_REDIS_ERRORS = ['Connection is closed', 'Connection is in monitoring mode'];
process.on('unhandledRejection', (reason, promise) => {
  const message =
    reason && typeof reason === 'object' && 'message' in reason
      ? (reason as Error).message
      : undefined;
  if (message && IGNORED_REDIS_ERRORS.some(snippet => message.includes(snippet))) {
    return;
  }
  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
});
describe('DeadLetterQueueHandler', () => {
let mainQueue: Queue;
let dlqHandler: DeadLetterQueueHandler;
let worker: Worker;
let connection: any;
const redisConfig = {
host: 'localhost',
port: 6379,
password: '',
db: 0,
};
beforeEach(async () => {
connection = getRedisConnection(redisConfig);
// Create main queue
mainQueue = new Queue('test-queue', { connection });
// Create DLQ handler
dlqHandler = new DeadLetterQueueHandler(mainQueue, connection, {
maxRetries: 3,
retryDelay: 100,
alertThreshold: 5,
cleanupAge: 24,
});
});
afterEach(async () => {
try {
if (worker) {
await worker.close();
}
await dlqHandler.shutdown();
await mainQueue.close();
} catch (error) {
// Ignore cleanup errors
}
await new Promise(resolve => setTimeout(resolve, 50));
});
describe('Failed Job Handling', () => {
test('should move job to DLQ after max retries', async () => {
let attemptCount = 0;
// Create worker that always fails
worker = new Worker('test-queue', async () => {
attemptCount++;
throw new Error('Job failed');
}, {
connection,
autorun: false,
});
// Add job with limited attempts
const job = await mainQueue.add('failing-job', { test: true }, {
attempts: 3,
backoff: { type: 'fixed', delay: 50 },
});
// Process job manually
await worker.run();
// Wait for retries
await new Promise(resolve => setTimeout(resolve, 300));
// Job should have failed 3 times
expect(attemptCount).toBe(3);
// Check if job was moved to DLQ
const dlqStats = await dlqHandler.getStats();
expect(dlqStats.total).toBe(1);
expect(dlqStats.byJobName['failing-job']).toBe(1);
});
test('should track failure count correctly', async () => {
const job = await mainQueue.add('test-job', { data: 'test' });
const error = new Error('Test error');
// Simulate multiple failures
await dlqHandler.handleFailedJob(job, error);
await dlqHandler.handleFailedJob(job, error);
// On third failure with max attempts reached, should move to DLQ
job.attemptsMade = 3;
job.opts.attempts = 3;
await dlqHandler.handleFailedJob(job, error);
const stats = await dlqHandler.getStats();
expect(stats.total).toBe(1);
});
});
describe('DLQ Statistics', () => {
test('should provide detailed statistics', async () => {
// Add some failed jobs to DLQ
const dlq = new Queue(`test-queue-dlq`, { connection });
await dlq.add('failed-job', {
originalJob: {
id: '1',
name: 'job-type-a',
data: { test: true },
attemptsMade: 3,
},
error: { message: 'Error 1' },
movedToDLQAt: new Date().toISOString(),
});
await dlq.add('failed-job', {
originalJob: {
id: '2',
name: 'job-type-b',
data: { test: true },
attemptsMade: 3,
},
error: { message: 'Error 2' },
movedToDLQAt: new Date().toISOString(),
});
const stats = await dlqHandler.getStats();
expect(stats.total).toBe(2);
expect(stats.recent).toBe(2); // Both are recent
expect(Object.keys(stats.byJobName).length).toBe(2);
expect(stats.oldestJob).toBeDefined();
await dlq.close();
});
test('should count recent jobs correctly', async () => {
const dlq = new Queue(`test-queue-dlq`, { connection });
// Add old job (25 hours ago)
const oldTimestamp = Date.now() - 25 * 60 * 60 * 1000;
await dlq.add('failed-job', {
originalJob: { id: '1', name: 'old-job' },
error: { message: 'Old error' },
movedToDLQAt: new Date(oldTimestamp).toISOString(),
}, { timestamp: oldTimestamp });
// Add recent job
await dlq.add('failed-job', {
originalJob: { id: '2', name: 'recent-job' },
error: { message: 'Recent error' },
movedToDLQAt: new Date().toISOString(),
});
const stats = await dlqHandler.getStats();
expect(stats.total).toBe(2);
expect(stats.recent).toBe(1); // Only one is recent
await dlq.close();
});
});
describe('DLQ Retry', () => {
test('should retry jobs from DLQ', async () => {
const dlq = new Queue(`test-queue-dlq`, { connection });
// Add failed jobs to DLQ
await dlq.add('failed-job', {
originalJob: {
id: '1',
name: 'retry-job',
data: { retry: true },
opts: { priority: 1 },
},
error: { message: 'Failed' },
movedToDLQAt: new Date().toISOString(),
});
await dlq.add('failed-job', {
originalJob: {
id: '2',
name: 'retry-job-2',
data: { retry: true },
opts: {},
},
error: { message: 'Failed' },
movedToDLQAt: new Date().toISOString(),
});
// Retry jobs
const retriedCount = await dlqHandler.retryDLQJobs(10);
expect(retriedCount).toBe(2);
// Check main queue has the retried jobs
const mainQueueJobs = await mainQueue.getWaiting();
expect(mainQueueJobs.length).toBe(2);
expect(mainQueueJobs[0].name).toBe('retry-job');
expect(mainQueueJobs[0].data).toEqual({ retry: true });
// DLQ should be empty
const dlqJobs = await dlq.getCompleted();
expect(dlqJobs.length).toBe(0);
await dlq.close();
});
test('should respect retry limit', async () => {
const dlq = new Queue(`test-queue-dlq`, { connection });
// Add 5 failed jobs
for (let i = 0; i < 5; i++) {
await dlq.add('failed-job', {
originalJob: {
id: `${i}`,
name: `job-${i}`,
data: { index: i },
},
error: { message: 'Failed' },
movedToDLQAt: new Date().toISOString(),
});
}
// Retry only 3 jobs
const retriedCount = await dlqHandler.retryDLQJobs(3);
expect(retriedCount).toBe(3);
// Check counts
const mainQueueJobs = await mainQueue.getWaiting();
expect(mainQueueJobs.length).toBe(3);
const remainingDLQ = await dlq.getCompleted();
expect(remainingDLQ.length).toBe(2);
await dlq.close();
});
});
describe('DLQ Cleanup', () => {
test('should cleanup old DLQ entries', async () => {
const dlq = new Queue(`test-queue-dlq`, { connection });
// Add old job (25 hours ago)
const oldTimestamp = Date.now() - 25 * 60 * 60 * 1000;
await dlq.add('failed-job', {
originalJob: { id: '1', name: 'old-job' },
error: { message: 'Old error' },
}, { timestamp: oldTimestamp });
// Add recent job (1 hour ago)
const recentTimestamp = Date.now() - 1 * 60 * 60 * 1000;
await dlq.add('failed-job', {
originalJob: { id: '2', name: 'recent-job' },
error: { message: 'Recent error' },
}, { timestamp: recentTimestamp });
// Run cleanup (24 hour threshold)
const removedCount = await dlqHandler.cleanup();
expect(removedCount).toBe(1);
// Check remaining jobs
const remaining = await dlq.getCompleted();
expect(remaining.length).toBe(1);
expect(remaining[0].data.originalJob.name).toBe('recent-job');
await dlq.close();
});
});
describe('Failed Job Inspection', () => {
test('should inspect failed jobs', async () => {
const dlq = new Queue(`test-queue-dlq`, { connection });
// Add failed jobs with different error types
await dlq.add('failed-job', {
originalJob: {
id: '1',
name: 'network-job',
data: { url: 'https://api.example.com' },
attemptsMade: 3,
},
error: {
message: 'Network timeout',
stack: 'Error: Network timeout\n at ...',
name: 'NetworkError',
},
movedToDLQAt: '2024-01-01T10:00:00Z',
});
await dlq.add('failed-job', {
originalJob: {
id: '2',
name: 'parse-job',
data: { input: 'invalid-json' },
attemptsMade: 2,
},
error: {
message: 'Invalid JSON',
stack: 'SyntaxError: Invalid JSON\n at ...',
name: 'SyntaxError',
},
movedToDLQAt: '2024-01-01T11:00:00Z',
});
const failedJobs = await dlqHandler.inspectFailedJobs(10);
expect(failedJobs.length).toBe(2);
expect(failedJobs[0]).toMatchObject({
id: '1',
name: 'network-job',
data: { url: 'https://api.example.com' },
error: {
message: 'Network timeout',
name: 'NetworkError',
},
failedAt: '2024-01-01T10:00:00Z',
attempts: 3,
});
await dlq.close();
});
});
describe('Alert Threshold', () => {
test('should detect when alert threshold is exceeded', async () => {
const dlq = new Queue(`test-queue-dlq`, { connection });
// Add jobs to exceed threshold (5)
for (let i = 0; i < 6; i++) {
await dlq.add('failed-job', {
originalJob: {
id: `${i}`,
name: `job-${i}`,
data: { index: i },
},
error: { message: 'Failed' },
movedToDLQAt: new Date().toISOString(),
});
}
const stats = await dlqHandler.getStats();
expect(stats.total).toBe(6);
// In a real implementation, this would trigger alerts
await dlq.close();
});
});
});

View file

@ -0,0 +1,221 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { QueueManager, handlerRegistry } from '../src';
// Suppress Redis connection errors in tests
process.on('unhandledRejection', (reason, promise) => {
if (reason && typeof reason === 'object' && 'message' in reason) {
const message = (reason as Error).message;
if (message.includes('Connection is closed') ||
message.includes('Connection is in monitoring mode')) {
// Suppress these specific Redis errors in tests
return;
}
}
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
});
describe('QueueManager Integration Tests', () => {
let queueManager: QueueManager;
// Use local Redis/Dragonfly
const redisConfig = {
host: 'localhost',
port: 6379,
password: '',
db: 0,
};
beforeEach(() => {
handlerRegistry.clear();
});
afterEach(async () => {
  if (queueManager) {
    try {
      // Bound shutdown so a hung Redis connection cannot stall the suite.
      await Promise.race([
        queueManager.shutdown(),
        new Promise((_, reject) =>
          setTimeout(() => reject(new Error('Shutdown timeout')), 3000)
        )
      ]);
    } catch (error) {
      // Ignore shutdown errors in tests.
      // FIX: `error` is `unknown` under strict TS (useUnknownInCatchVariables);
      // narrow before reading `.message` instead of assuming Error.
      const message = error instanceof Error ? error.message : String(error);
      console.warn('Shutdown error:', message);
    } finally {
      queueManager = null as any;
    }
  }
  // Clear handler registry to prevent conflicts
  handlerRegistry.clear();
  // Add delay to allow connections to close
  await new Promise(resolve => setTimeout(resolve, 100));
});
test('should initialize queue manager', async () => {
queueManager = new QueueManager({
queueName: 'test-queue',
redis: redisConfig,
workers: 1,
concurrency: 5,
});
await queueManager.initialize();
expect(queueManager.queueName).toBe('test-queue');
});
test('should add and process a job', async () => {
let processedPayload: any;
// Register handler
handlerRegistry.register('test-handler', {
'test-operation': async payload => {
processedPayload = payload;
return { success: true, data: payload };
},
});
queueManager = new QueueManager({
queueName: 'test-queue',
redis: redisConfig,
workers: 1,
});
await queueManager.initialize();
// Add job
const job = await queueManager.add('test-job', {
handler: 'test-handler',
operation: 'test-operation',
payload: { message: 'Hello, Queue!' },
});
expect(job.name).toBe('test-job');
// Wait for processing
await new Promise(resolve => setTimeout(resolve, 100));
expect(processedPayload).toEqual({ message: 'Hello, Queue!' });
});
test('should handle job errors with retries', async () => {
let attemptCount = 0;
handlerRegistry.register('retry-handler', {
'failing-operation': async () => {
attemptCount++;
if (attemptCount < 3) {
throw new Error(`Attempt ${attemptCount} failed`);
}
return { success: true };
},
});
queueManager = new QueueManager({
queueName: 'test-queue-retry',
redis: redisConfig,
workers: 1,
defaultJobOptions: {
attempts: 3,
backoff: {
type: 'fixed',
delay: 50,
},
},
});
await queueManager.initialize();
const job = await queueManager.add('retry-job', {
handler: 'retry-handler',
operation: 'failing-operation',
payload: {},
});
// Wait for retries
await new Promise(resolve => setTimeout(resolve, 500));
const completed = await job.isCompleted();
expect(completed).toBe(true);
expect(attemptCount).toBe(3);
});
test('should collect metrics when enabled', async () => {
queueManager = new QueueManager({
queueName: 'test-queue-metrics',
redis: redisConfig,
workers: 0,
enableMetrics: true,
});
await queueManager.initialize();
// Add some jobs
await queueManager.add('job1', {
handler: 'test',
operation: 'test',
payload: { id: 1 },
});
await queueManager.add('job2', {
handler: 'test',
operation: 'test',
payload: { id: 2 },
});
const metrics = await queueManager.getMetrics();
expect(metrics).toBeDefined();
expect(metrics.waiting).toBeDefined();
expect(metrics.active).toBeDefined();
expect(metrics.completed).toBeDefined();
expect(metrics.failed).toBeDefined();
expect(metrics.processingTime).toBeDefined();
expect(metrics.throughput).toBeDefined();
});
test('should handle rate limiting when enabled', async () => {
let processedCount = 0;
handlerRegistry.register('rate-limited-handler', {
'limited-op': async () => {
processedCount++;
return { processed: true };
},
});
queueManager = new QueueManager({
queueName: 'test-queue-rate',
redis: redisConfig,
workers: 1,
enableRateLimit: true,
rateLimitRules: [
{
level: 'handler',
handler: 'rate-limited-handler',
config: {
points: 2, // 2 requests
duration: 1, // per 1 second
},
},
],
});
await queueManager.initialize();
// Add 3 jobs quickly
for (let i = 0; i < 3; i++) {
await queueManager.add(`job${i}`, {
handler: 'rate-limited-handler',
operation: 'limited-op',
payload: { id: i },
});
}
// Wait for processing
await new Promise(resolve => setTimeout(resolve, 200));
// Only 2 should be processed due to rate limit
expect(processedCount).toBe(2);
});
});

View file

@ -0,0 +1,371 @@
import { afterEach, beforeEach, describe, expect, test } from 'bun:test';
import { handlerRegistry, QueueManager } from '../src';
// Suppress Redis connection errors in tests
process.on('unhandledRejection', (reason, promise) => {
if (reason && typeof reason === 'object' && 'message' in reason) {
const message = (reason as Error).message;
if (message.includes('Connection is closed') ||
message.includes('Connection is in monitoring mode')) {
return;
}
}
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
});
describe('QueueManager', () => {
let queueManager: QueueManager;
// Use local Redis/Dragonfly
const redisConfig = {
host: 'localhost',
port: 6379,
password: '',
db: 0,
};
beforeEach(() => {
handlerRegistry.clear();
});
afterEach(async () => {
  if (queueManager) {
    try {
      // Bound shutdown so a hung Redis connection cannot stall the suite.
      await Promise.race([
        queueManager.shutdown(),
        new Promise((_, reject) =>
          setTimeout(() => reject(new Error('Shutdown timeout')), 3000)
        )
      ]);
    } catch (error) {
      // FIX: `error` is `unknown` under strict TS (useUnknownInCatchVariables);
      // narrow before reading `.message` instead of assuming Error.
      const message = error instanceof Error ? error.message : String(error);
      console.warn('Shutdown error:', message);
    } finally {
      queueManager = null as any;
    }
  }
  // Reset shared registry and let Redis connections drain between tests.
  handlerRegistry.clear();
  await new Promise(resolve => setTimeout(resolve, 100));
});
describe('Basic Operations', () => {
test('should initialize queue manager', async () => {
queueManager = new QueueManager({
queueName: 'test-queue',
redis: redisConfig,
workers: 1,
concurrency: 5,
});
await queueManager.initialize();
expect(queueManager.queueName).toBe('test-queue');
});
test('should add and process a job', async () => {
let processedPayload: any;
// Register handler
handlerRegistry.register('test-handler', {
'test-operation': async payload => {
processedPayload = payload;
return { success: true, data: payload };
},
});
queueManager = new QueueManager({
queueName: 'test-queue',
redis: redisConfig,
workers: 1,
});
await queueManager.initialize();
// Add job
const job = await queueManager.add('test-job', {
handler: 'test-handler',
operation: 'test-operation',
payload: { message: 'Hello, Queue!' },
});
expect(job.name).toBe('test-job');
// Wait for processing
await new Promise(resolve => setTimeout(resolve, 100));
expect(processedPayload).toEqual({ message: 'Hello, Queue!' });
});
test('should handle missing handler gracefully', async () => {
  // A job routed to an unregistered handler must end up failed, not hang.
  queueManager = new QueueManager({
    queueName: 'test-queue',
    redis: redisConfig,
    workers: 1,
  });
  await queueManager.initialize();
  const job = await queueManager.add('test-job', {
    handler: 'non-existent',
    operation: 'test-operation',
    payload: { test: true },
  });
  // Give the worker a moment to pick the job up and reject it.
  await new Promise(resolve => setTimeout(resolve, 100));
  expect(await job.isFailed()).toBe(true);
});
test('should add multiple jobs in bulk', async () => {
let processedCount = 0;
handlerRegistry.register('bulk-handler', {
process: async payload => {
processedCount++;
return { processed: true };
},
});
queueManager = new QueueManager({
queueName: 'test-queue',
redis: redisConfig,
workers: 2,
concurrency: 5,
});
await queueManager.initialize();
const jobs = await queueManager.addBulk([
{
name: 'job1',
data: { handler: 'bulk-handler', operation: 'process', payload: { id: 1 } },
},
{
name: 'job2',
data: { handler: 'bulk-handler', operation: 'process', payload: { id: 2 } },
},
{
name: 'job3',
data: { handler: 'bulk-handler', operation: 'process', payload: { id: 3 } },
},
]);
expect(jobs.length).toBe(3);
// Wait for processing
await new Promise(resolve => setTimeout(resolve, 200));
expect(processedCount).toBe(3);
});
test('should get queue statistics', async () => {
queueManager = new QueueManager({
queueName: 'test-queue',
redis: redisConfig,
workers: 0, // No workers, jobs will stay in waiting
});
await queueManager.initialize();
// Add some jobs
await queueManager.add('job1', {
handler: 'test',
operation: 'test',
payload: { id: 1 },
});
await queueManager.add('job2', {
handler: 'test',
operation: 'test',
payload: { id: 2 },
});
const stats = await queueManager.getStats();
expect(stats.waiting).toBe(2);
expect(stats.active).toBe(0);
expect(stats.completed).toBe(0);
expect(stats.failed).toBe(0);
});
test('should pause and resume queue', async () => {
let processedCount = 0;
handlerRegistry.register('pause-test', {
process: async () => {
processedCount++;
return { ok: true };
},
});
queueManager = new QueueManager({
queueName: 'test-queue',
redis: redisConfig,
workers: 1,
});
await queueManager.initialize();
// Pause queue
await queueManager.pause();
// Add job while paused
await queueManager.add('job1', {
handler: 'pause-test',
operation: 'process',
payload: {},
});
// Wait a bit - job should not be processed
await new Promise(resolve => setTimeout(resolve, 100));
expect(processedCount).toBe(0);
// Resume queue
await queueManager.resume();
// Wait for processing
await new Promise(resolve => setTimeout(resolve, 100));
expect(processedCount).toBe(1);
});
});
describe('Scheduled Jobs', () => {
test('should register and process scheduled jobs', async () => {
let executionCount = 0;
handlerRegistry.registerWithSchedule({
name: 'scheduled-handler',
operations: {
'scheduled-task': async payload => {
executionCount++;
return { executed: true, timestamp: Date.now() };
},
},
scheduledJobs: [
{
type: 'test-schedule',
operation: 'scheduled-task',
payload: { test: true },
cronPattern: '*/1 * * * * *', // Every second
description: 'Test scheduled job',
},
],
});
queueManager = new QueueManager({
queueName: 'test-queue',
redis: redisConfig,
workers: 1,
enableScheduledJobs: true,
});
await queueManager.initialize();
// Wait for scheduled job to execute
await new Promise(resolve => setTimeout(resolve, 2500));
expect(executionCount).toBeGreaterThanOrEqual(2);
});
});
describe('Error Handling', () => {
test('should handle job errors with retries', async () => {
let attemptCount = 0;
handlerRegistry.register('retry-handler', {
'failing-operation': async () => {
attemptCount++;
if (attemptCount < 3) {
throw new Error(`Attempt ${attemptCount} failed`);
}
return { success: true };
},
});
queueManager = new QueueManager({
queueName: 'test-queue',
redis: redisConfig,
workers: 1,
defaultJobOptions: {
attempts: 3,
backoff: {
type: 'fixed',
delay: 50,
},
},
});
await queueManager.initialize();
const job = await queueManager.add('retry-job', {
handler: 'retry-handler',
operation: 'failing-operation',
payload: {},
});
// Wait for retries
await new Promise(resolve => setTimeout(resolve, 500));
const completed = await job.isCompleted();
expect(completed).toBe(true);
expect(attemptCount).toBe(3);
});
});
describe('Multiple Handlers', () => {
test('should handle multiple handlers with different operations', async () => {
const results: any[] = [];
handlerRegistry.register('handler-a', {
'operation-1': async payload => {
results.push({ handler: 'a', op: '1', payload });
return { handler: 'a', op: '1' };
},
'operation-2': async payload => {
results.push({ handler: 'a', op: '2', payload });
return { handler: 'a', op: '2' };
},
});
handlerRegistry.register('handler-b', {
'operation-1': async payload => {
results.push({ handler: 'b', op: '1', payload });
return { handler: 'b', op: '1' };
},
});
queueManager = new QueueManager({
queueName: 'test-queue',
redis: redisConfig,
workers: 2,
});
await queueManager.initialize();
// Add jobs for different handlers
await queueManager.addBulk([
{
name: 'job1',
data: { handler: 'handler-a', operation: 'operation-1', payload: { id: 1 } },
},
{
name: 'job2',
data: { handler: 'handler-a', operation: 'operation-2', payload: { id: 2 } },
},
{
name: 'job3',
data: { handler: 'handler-b', operation: 'operation-1', payload: { id: 3 } },
},
]);
// Wait for processing
await new Promise(resolve => setTimeout(resolve, 200));
expect(results.length).toBe(3);
expect(results).toContainEqual({ handler: 'a', op: '1', payload: { id: 1 } });
expect(results).toContainEqual({ handler: 'a', op: '2', payload: { id: 2 } });
expect(results).toContainEqual({ handler: 'b', op: '1', payload: { id: 3 } });
});
});
});

View file

@ -0,0 +1,303 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { Queue, QueueEvents, Worker } from 'bullmq';
import { QueueMetricsCollector } from '../src/queue-metrics';
import { getRedisConnection } from '../src/utils';
// Suppress Redis connection errors in tests
process.on('unhandledRejection', (reason, promise) => {
if (reason && typeof reason === 'object' && 'message' in reason) {
const message = (reason as Error).message;
if (message.includes('Connection is closed') ||
message.includes('Connection is in monitoring mode')) {
return;
}
}
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
});
describe('QueueMetricsCollector', () => {
let queue: Queue;
let queueEvents: QueueEvents;
let metricsCollector: QueueMetricsCollector;
let worker: Worker;
let connection: any;
const redisConfig = {
host: 'localhost',
port: 6379,
password: '',
db: 0,
};
beforeEach(async () => {
connection = getRedisConnection(redisConfig);
// Create queue and events
queue = new Queue('metrics-test-queue', { connection });
queueEvents = new QueueEvents('metrics-test-queue', { connection });
// Create metrics collector
metricsCollector = new QueueMetricsCollector(queue, queueEvents);
// Wait for connections
await queue.waitUntilReady();
await queueEvents.waitUntilReady();
});
afterEach(async () => {
try {
if (worker) {
await worker.close();
}
await queueEvents.close();
await queue.close();
} catch (error) {
// Ignore cleanup errors
}
await new Promise(resolve => setTimeout(resolve, 50));
});
describe('Job Count Metrics', () => {
test('should collect basic job counts', async () => {
// Add jobs in different states
await queue.add('waiting-job', { test: true });
await queue.add('delayed-job', { test: true }, { delay: 60000 });
const metrics = await metricsCollector.collect();
expect(metrics.waiting).toBe(1);
expect(metrics.delayed).toBe(1);
expect(metrics.active).toBe(0);
expect(metrics.completed).toBe(0);
expect(metrics.failed).toBe(0);
});
test('should track completed and failed jobs', async () => {
let jobCount = 0;
// Create worker that alternates between success and failure
worker = new Worker('metrics-test-queue', async () => {
jobCount++;
if (jobCount % 2 === 0) {
throw new Error('Test failure');
}
return { success: true };
}, { connection });
// Add jobs
await queue.add('job1', { test: 1 });
await queue.add('job2', { test: 2 });
await queue.add('job3', { test: 3 });
await queue.add('job4', { test: 4 });
// Wait for processing
await new Promise(resolve => setTimeout(resolve, 200));
const metrics = await metricsCollector.collect();
expect(metrics.completed).toBe(2);
expect(metrics.failed).toBe(2);
});
});
describe('Processing Time Metrics', () => {
test('should track processing times', async () => {
const processingTimes = [50, 100, 150, 200, 250];
let jobIndex = 0;
// Create worker with variable processing times
worker = new Worker('metrics-test-queue', async () => {
const delay = processingTimes[jobIndex++] || 100;
await new Promise(resolve => setTimeout(resolve, delay));
return { processed: true };
}, { connection });
// Add jobs
for (let i = 0; i < processingTimes.length; i++) {
await queue.add(`job${i}`, { index: i });
}
// Wait for processing
await new Promise(resolve => setTimeout(resolve, 1500));
const metrics = await metricsCollector.collect();
expect(metrics.processingTime.avg).toBeGreaterThan(0);
expect(metrics.processingTime.min).toBeGreaterThanOrEqual(50);
expect(metrics.processingTime.max).toBeLessThanOrEqual(300);
expect(metrics.processingTime.p95).toBeGreaterThan(metrics.processingTime.avg);
});
test('should handle empty processing times', async () => {
const metrics = await metricsCollector.collect();
expect(metrics.processingTime).toEqual({
avg: 0,
min: 0,
max: 0,
p95: 0,
p99: 0,
});
});
});
describe('Throughput Metrics', () => {
test('should calculate throughput correctly', async () => {
// Create fast worker
worker = new Worker('metrics-test-queue', async () => {
return { success: true };
}, { connection, concurrency: 5 });
// Add multiple jobs
const jobPromises = [];
for (let i = 0; i < 10; i++) {
jobPromises.push(queue.add(`job${i}`, { index: i }));
}
await Promise.all(jobPromises);
// Wait for processing
await new Promise(resolve => setTimeout(resolve, 500));
const metrics = await metricsCollector.collect();
expect(metrics.throughput.completedPerMinute).toBeGreaterThan(0);
expect(metrics.throughput.totalPerMinute).toBe(
metrics.throughput.completedPerMinute + metrics.throughput.failedPerMinute
);
});
});
// Health evaluation: the collector flags failure-rate and backlog problems.
describe('Queue Health', () => {
  test('should report healthy queue', async () => {
    const metrics = await metricsCollector.collect();
    expect(metrics.isHealthy).toBe(true);
    expect(metrics.healthIssues).toEqual([]);
  });
  test('should detect high failure rate', async () => {
    // Worker that fails every job so the failure-rate threshold is crossed.
    worker = new Worker('metrics-test-queue', async () => {
      throw new Error('Always fails');
    }, { connection });
    for (let i = 0; i < 10; i++) {
      await queue.add(`job${i}`, { index: i });
    }
    // Wait for failures to be recorded
    await new Promise(resolve => setTimeout(resolve, 500));
    const metrics = await metricsCollector.collect();
    expect(metrics.isHealthy).toBe(false);
    // BUG FIX: toContain() compares elements with reference equality and never
    // applies an asymmetric matcher, so the original assertion could not match.
    // arrayContaining actually evaluates stringMatching against each element.
    expect(metrics.healthIssues).toEqual(
      expect.arrayContaining([expect.stringMatching(/High failure rate/)])
    );
  });
  test('should detect large queue backlog', async () => {
    // Bulk-add 1001 jobs in one round trip instead of 1001 sequential awaits
    // (same queue state, dramatically faster test).
    await queue.addBulk(
      Array.from({ length: 1001 }, (_, i) => ({ name: `job${i}`, data: { index: i } }))
    );
    const metrics = await metricsCollector.collect();
    expect(metrics.isHealthy).toBe(false);
    // Same matcher fix as above.
    expect(metrics.healthIssues).toEqual(
      expect.arrayContaining([expect.stringMatching(/Large queue backlog/)])
    );
  });
});
// Verifies tracking of the enqueue time of the earliest still-waiting job.
describe('Oldest Waiting Job', () => {
  test('should track oldest waiting job', async () => {
    const beforeAdd = Date.now();
    // Add jobs with delays
    await queue.add('old-job', { test: true });
    await new Promise(resolve => setTimeout(resolve, 100));
    await queue.add('new-job', { test: true });
    const metrics = await metricsCollector.collect();
    // The reported timestamp should be no earlier than when we started adding.
    expect(metrics.oldestWaitingJob).toBeDefined();
    expect(metrics.oldestWaitingJob!.getTime()).toBeGreaterThanOrEqual(beforeAdd);
  });
  test('should return null when no waiting jobs', async () => {
    // Create worker that processes immediately
    worker = new Worker('metrics-test-queue', async () => {
      return { success: true };
    }, { connection });
    const metrics = await metricsCollector.collect();
    // With nothing left waiting, the collector reports null rather than a date.
    expect(metrics.oldestWaitingJob).toBe(null);
  });
});
// Checks the human-readable report produced by getReport(); `toContain` on a
// string is a substring check, so these assertions are format-level only.
describe('Metrics Report', () => {
test('should generate formatted report', async () => {
// Add some jobs
await queue.add('job1', { test: true });
// Delayed job so the report shows one waiting and one delayed entry.
await queue.add('job2', { test: true }, { delay: 5000 });
const report = await metricsCollector.getReport();
expect(report).toContain('Queue Metrics Report');
expect(report).toContain('Status:');
expect(report).toContain('Job Counts:');
expect(report).toContain('Performance:');
expect(report).toContain('Throughput:');
expect(report).toContain('Waiting: 1');
expect(report).toContain('Delayed: 1');
});
test('should include health issues in report', async () => {
// Add many jobs to trigger health issue
// 1001 jobs crosses the backlog threshold used by the collector (see the
// 'Large queue backlog' health test above — threshold appears to be 1000).
for (let i = 0; i < 1001; i++) {
await queue.add(`job${i}`, { index: i });
}
const report = await metricsCollector.getReport();
expect(report).toContain('Issues Detected');
expect(report).toContain('Health Issues:');
expect(report).toContain('Large queue backlog');
});
});
// Verifies the Prometheus text exposition format: HELP/TYPE headers plus
// labeled sample lines for each exported metric family.
describe('Prometheus Metrics', () => {
test('should export metrics in Prometheus format', async () => {
// Add some jobs and process them
worker = new Worker('metrics-test-queue', async () => {
await new Promise(resolve => setTimeout(resolve, 50));
return { success: true };
}, { connection });
await queue.add('job1', { test: true });
await queue.add('job2', { test: true });
// Wait for processing
await new Promise(resolve => setTimeout(resolve, 200));
const prometheusMetrics = await metricsCollector.getPrometheusMetrics();
// Check format
expect(prometheusMetrics).toContain('# HELP queue_jobs_total');
expect(prometheusMetrics).toContain('# TYPE queue_jobs_total gauge');
expect(prometheusMetrics).toContain('queue_jobs_total{queue="metrics-test-queue",status="completed"}');
expect(prometheusMetrics).toContain('# HELP queue_processing_time_seconds');
expect(prometheusMetrics).toContain('# TYPE queue_processing_time_seconds summary');
expect(prometheusMetrics).toContain('# HELP queue_throughput_per_minute');
expect(prometheusMetrics).toContain('# TYPE queue_throughput_per_minute gauge');
expect(prometheusMetrics).toContain('# HELP queue_health');
expect(prometheusMetrics).toContain('# TYPE queue_health gauge');
});
});
});

View file

@ -0,0 +1,81 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { QueueManager, handlerRegistry } from '../src';
// Smoke tests for QueueManager construction/initialization and the shared
// handler registry. No queue processing is exercised here.
describe('QueueManager Simple Tests', () => {
  // Assumes Redis is running locally on default port
  const redisConfig = {
    host: 'localhost',
    port: 6379,
  };
  let queueManager: QueueManager;

  // Every test starts from an empty handler registry.
  beforeEach(() => {
    handlerRegistry.clear();
  });

  // Best-effort teardown: a shutdown failure must not mask the test result.
  afterEach(async () => {
    if (!queueManager) return;
    try {
      await queueManager.shutdown();
    } catch {
      // Ignore errors during cleanup
    }
  });

  test('should create queue manager instance', () => {
    queueManager = new QueueManager({
      queueName: 'test-queue',
      redis: redisConfig,
    });
    expect(queueManager.queueName).toBe('test-queue');
  });

  test('should handle missing Redis gracefully', async () => {
    // Use a port that's likely not running Redis
    const unreachableRedis = { host: 'localhost', port: 9999 };
    queueManager = new QueueManager({
      queueName: 'test-queue',
      redis: unreachableRedis,
    });
    await expect(queueManager.initialize()).rejects.toThrow();
  });

  test('handler registry should work', () => {
    const echoHandler = async (payload: any) => ({ success: true, payload });
    handlerRegistry.register('test-handler', { 'test-op': echoHandler });
    // Lookup must return the exact function instance that was registered.
    expect(handlerRegistry.getHandler('test-handler', 'test-op')).toBe(echoHandler);
  });

  test('handler registry should return null for missing handler', () => {
    expect(handlerRegistry.getHandler('missing', 'op')).toBe(null);
  });

  test('should get handler statistics', () => {
    handlerRegistry.register('handler1', {
      'op1': async () => ({}),
      'op2': async () => ({}),
    });
    handlerRegistry.register('handler2', {
      'op1': async () => ({}),
    });
    // Two handlers, three operations in total.
    const stats = handlerRegistry.getStats();
    expect(stats.handlers).toBe(2);
    expect(stats.totalOperations).toBe(3);
  });
});

View file

@ -0,0 +1,309 @@
import { describe, test, expect, beforeEach, afterEach } from 'bun:test';
import { QueueRateLimiter } from '../src/rate-limiter';
import { getRedisConnection } from '../src/utils';
import Redis from 'ioredis';
// Suppress Redis connection errors in tests
// Suppress noisy Redis teardown rejections in tests; everything else is logged.
process.on('unhandledRejection', (reason, promise) => {
  const hasMessage =
    reason !== null && typeof reason === 'object' && 'message' in reason;
  if (hasMessage) {
    const message = (reason as Error).message;
    const ignorable =
      message.includes('Connection is closed') ||
      message.includes('Connection is in monitoring mode');
    if (ignorable) return;
  }
  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
});
// Integration tests for QueueRateLimiter against a real local Redis.
// (This span opens the outer describe; it is closed at the end of the file.)
describe('QueueRateLimiter', () => {
let redisClient: Redis;
let rateLimiter: QueueRateLimiter;
const redisConfig = {
host: 'localhost',
port: 6379,
password: '',
db: 0,
};
beforeEach(async () => {
// Create Redis client
redisClient = new Redis(getRedisConnection(redisConfig));
// Clear Redis keys for tests
// 'rl:*' is assumed to be the limiter's key prefix — confirm against
// QueueRateLimiter's implementation if tests start bleeding state.
try {
const keys = await redisClient.keys('rl:*');
if (keys.length > 0) {
await redisClient.del(...keys);
}
} catch (error) {
// Ignore cleanup errors
}
rateLimiter = new QueueRateLimiter(redisClient);
});
afterEach(async () => {
if (redisClient) {
try {
await redisClient.quit();
} catch (error) {
// Ignore cleanup errors
}
}
// Brief pause lets in-flight Redis operations settle before the next test.
await new Promise(resolve => setTimeout(resolve, 50));
});
// Covers the three rule scopes (global / handler / operation) and their
// interaction: when several rules apply, the most restrictive one blocks first.
describe('Rate Limit Rules', () => {
test('should add and enforce global rate limit', async () => {
rateLimiter.addRule({
level: 'global',
config: {
points: 5,
duration: 1, // 1 second
},
});
// Consume 5 points
for (let i = 0; i < 5; i++) {
const result = await rateLimiter.checkLimit('any-handler', 'any-operation');
expect(result.allowed).toBe(true);
}
// 6th request should be blocked
const blocked = await rateLimiter.checkLimit('any-handler', 'any-operation');
expect(blocked.allowed).toBe(false);
expect(blocked.retryAfter).toBeGreaterThan(0);
});
test('should add and enforce handler-level rate limit', async () => {
rateLimiter.addRule({
level: 'handler',
handler: 'api-handler',
config: {
points: 3,
duration: 1,
},
});
// api-handler should be limited
for (let i = 0; i < 3; i++) {
const result = await rateLimiter.checkLimit('api-handler', 'any-operation');
expect(result.allowed).toBe(true);
}
const blocked = await rateLimiter.checkLimit('api-handler', 'any-operation');
expect(blocked.allowed).toBe(false);
// Other handlers should not be limited
const otherHandler = await rateLimiter.checkLimit('other-handler', 'any-operation');
expect(otherHandler.allowed).toBe(true);
});
test('should add and enforce operation-level rate limit', async () => {
rateLimiter.addRule({
level: 'operation',
handler: 'data-handler',
operation: 'fetch-prices',
config: {
points: 2,
duration: 1,
},
});
// Specific operation should be limited
for (let i = 0; i < 2; i++) {
const result = await rateLimiter.checkLimit('data-handler', 'fetch-prices');
expect(result.allowed).toBe(true);
}
const blocked = await rateLimiter.checkLimit('data-handler', 'fetch-prices');
expect(blocked.allowed).toBe(false);
// Other operations on same handler should work
const otherOp = await rateLimiter.checkLimit('data-handler', 'fetch-volume');
expect(otherOp.allowed).toBe(true);
});
test('should enforce multiple rate limits (most restrictive wins)', async () => {
// Global: 10/sec
rateLimiter.addRule({
level: 'global',
config: { points: 10, duration: 1 },
});
// Handler: 5/sec
rateLimiter.addRule({
level: 'handler',
handler: 'test-handler',
config: { points: 5, duration: 1 },
});
// Operation: 2/sec
rateLimiter.addRule({
level: 'operation',
handler: 'test-handler',
operation: 'test-op',
config: { points: 2, duration: 1 },
});
// Should be limited by operation level (most restrictive)
for (let i = 0; i < 2; i++) {
const result = await rateLimiter.checkLimit('test-handler', 'test-op');
expect(result.allowed).toBe(true);
}
const blocked = await rateLimiter.checkLimit('test-handler', 'test-op');
expect(blocked.allowed).toBe(false);
});
});
// getStatus() must report every rule that applies to a handler/operation pair,
// including remaining points after consumption.
describe('Rate Limit Status', () => {
test('should get rate limit status', async () => {
rateLimiter.addRule({
level: 'handler',
handler: 'status-test',
config: { points: 10, duration: 60 },
});
// Consume some points
await rateLimiter.checkLimit('status-test', 'operation');
await rateLimiter.checkLimit('status-test', 'operation');
const status = await rateLimiter.getStatus('status-test', 'operation');
expect(status.handler).toBe('status-test');
expect(status.operation).toBe('operation');
expect(status.limits.length).toBe(1);
expect(status.limits[0].points).toBe(10);
// Two of ten points consumed above.
expect(status.limits[0].remaining).toBe(8);
});
test('should show multiple applicable limits in status', async () => {
rateLimiter.addRule({
level: 'global',
config: { points: 100, duration: 60 },
});
rateLimiter.addRule({
level: 'handler',
handler: 'multi-test',
config: { points: 50, duration: 60 },
});
// Both the global and the handler-level rule apply to this pair.
const status = await rateLimiter.getStatus('multi-test', 'operation');
expect(status.limits.length).toBe(2);
const globalLimit = status.limits.find(l => l.level === 'global');
const handlerLimit = status.limits.find(l => l.level === 'handler');
expect(globalLimit?.points).toBe(100);
expect(handlerLimit?.points).toBe(50);
});
});
// Administrative operations: resetting consumed points, listing rules, and
// removing a rule at runtime.
describe('Rate Limit Management', () => {
test('should reset rate limits', async () => {
rateLimiter.addRule({
level: 'handler',
handler: 'reset-test',
config: { points: 1, duration: 60 },
});
// Consume the limit
await rateLimiter.checkLimit('reset-test', 'operation');
let blocked = await rateLimiter.checkLimit('reset-test', 'operation');
expect(blocked.allowed).toBe(false);
// Reset limits
await rateLimiter.reset('reset-test');
// Should be allowed again
const afterReset = await rateLimiter.checkLimit('reset-test', 'operation');
expect(afterReset.allowed).toBe(true);
});
test('should get all rules', async () => {
rateLimiter.addRule({
level: 'global',
config: { points: 100, duration: 60 },
});
rateLimiter.addRule({
level: 'handler',
handler: 'test',
config: { points: 50, duration: 60 },
});
// Rules are returned in insertion order.
const rules = rateLimiter.getRules();
expect(rules.length).toBe(2);
expect(rules[0].level).toBe('global');
expect(rules[1].level).toBe('handler');
});
test('should remove specific rule', async () => {
rateLimiter.addRule({
level: 'handler',
handler: 'remove-test',
config: { points: 1, duration: 1 },
});
// Verify rule exists
await rateLimiter.checkLimit('remove-test', 'op');
let blocked = await rateLimiter.checkLimit('remove-test', 'op');
expect(blocked.allowed).toBe(false);
// Remove rule
const removed = rateLimiter.removeRule('handler', 'remove-test');
expect(removed).toBe(true);
// Should not be limited anymore
// (Previously consumed points may still be in Redis, but with no matching
// rule there is nothing left to enforce.)
const afterRemove = await rateLimiter.checkLimit('remove-test', 'op');
expect(afterRemove.allowed).toBe(true);
});
});
// A rule with blockDuration keeps the caller blocked beyond the window once
// the limit has been exceeded.
describe('Block Duration', () => {
test('should block for specified duration after limit exceeded', async () => {
rateLimiter.addRule({
level: 'handler',
handler: 'block-test',
config: {
points: 1,
duration: 1,
blockDuration: 2, // Block for 2 seconds
},
});
// Consume limit
await rateLimiter.checkLimit('block-test', 'op');
// Should be blocked
const blocked = await rateLimiter.checkLimit('block-test', 'op');
expect(blocked.allowed).toBe(false);
// retryAfter is asserted in milliseconds here — confirm the unit against
// checkLimit's return contract (blockDuration above is in seconds).
expect(blocked.retryAfter).toBeGreaterThanOrEqual(1000); // At least 1 second
});
});
// Fail-open behavior: when the backing Redis is unreachable, the limiter must
// allow requests rather than block the queue.
describe('Error Handling', () => {
test('should allow requests when rate limiter fails', async () => {
// Create a rate limiter with invalid redis client
const badRedis = new Redis({
host: 'invalid-host',
port: 9999,
retryStrategy: () => null, // Disable retries
});
const failingLimiter = new QueueRateLimiter(badRedis);
failingLimiter.addRule({
level: 'global',
config: { points: 1, duration: 1 },
});
// Should allow even though Redis is not available
const result = await failingLimiter.checkLimit('test', 'test');
expect(result.allowed).toBe(true);
badRedis.disconnect();
});
});
});

19
libs/queue/turbo.json Normal file
View file

@ -0,0 +1,19 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/cache#build", "@stock-bot/logger#build", "@stock-bot/types#build"],
"outputs": ["dist/**"],
"inputs": [
"src/**",
"package.json",
"tsconfig.json",
"!**/*.test.ts",
"!**/*.spec.ts",
"!**/test/**",
"!**/tests/**",
"!**/__tests__/**"
]
}
}
}

View file

@ -11,7 +11,6 @@
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/data-frame": "*",
"@stock-bot/event-bus": "*",
"@stock-bot/logger": "*",
"@stock-bot/utils": "*",

View file

@ -1,5 +1,4 @@
import { EventEmitter } from 'eventemitter3';
import { DataFrame } from '@stock-bot/data-frame';
import { EventBus } from '@stock-bot/event-bus';
import { getLogger } from '@stock-bot/logger';
@ -29,7 +28,6 @@ export interface TradingSignal {
export interface StrategyContext {
symbol: string;
timeframe: string;
data: DataFrame;
indicators: Record<string, any>;
position?: Position;
portfolio: PortfolioSummary;
@ -310,7 +308,6 @@ export class StrategyEngine extends EventEmitter {
const context: StrategyContext = {
symbol,
timeframe: '1m', // TODO: Get from strategy config
data: new DataFrame([data]), // TODO: Use historical data
indicators: {},
portfolio: {
totalValue: 100000, // TODO: Get real portfolio data

View file

@ -1,9 +1,4 @@
{
"extends": "../../tsconfig.lib.json",
"references": [
{ "path": "../data-frame" },
{ "path": "../event-bus" },
{ "path": "../logger" },
{ "path": "../utils" }
]
"references": [{ "path": "../event-bus" }, { "path": "../logger" }, { "path": "../utils" }]
}

View file

@ -12,8 +12,7 @@
},
"dependencies": {
"@stock-bot/logger": "*",
"@stock-bot/utils": "*",
"@stock-bot/data-frame": "*"
"@stock-bot/utils": "*"
},
"devDependencies": {
"@types/node": "^20.11.0",

View file

@ -1,397 +1,397 @@
import { DataFrame } from '@stock-bot/data-frame';
import { getLogger } from '@stock-bot/logger';
import { atr, bollingerBands, ema, macd, rsi, sma } from '@stock-bot/utils';
// import { DataFrame } from '@stock-bot/data-frame';
// import { getLogger } from '@stock-bot/logger';
// import { atr, bollingerBands, ema, macd, rsi, sma } from '@stock-bot/utils';
// Vector operations interface
/** A named element-wise operation over numeric input vectors. */
export interface VectorOperation {
name: string;
// Input names, in the order their vectors are passed to `operation`.
inputs: string[];
output: string;
// Receives the resolved input vectors (same order as `inputs`); returns one vector.
operation: (inputs: number[][]) => number[];
}
// // Vector operations interface
// export interface VectorOperation {
// name: string;
// inputs: string[];
// output: string;
// operation: (inputs: number[][]) => number[];
// }
// Vectorized strategy context
/** Inputs available to a vectorized strategy run. */
export interface VectorizedContext {
data: DataFrame;
// Lookback window length — presumably in bars; TODO confirm against callers.
lookback: number;
// Precomputed indicator series keyed by name (e.g. sma_20, rsi).
indicators: Record<string, number[]>;
// Signal series produced by the strategy, keyed by signal name (e.g. buy, sell).
signals: Record<string, number[]>;
}
// // Vectorized strategy context
// export interface VectorizedContext {
// data: DataFrame;
// lookback: number;
// indicators: Record<string, number[]>;
// signals: Record<string, number[]>;
// }
// Performance metrics for vectorized backtesting
/** Aggregate performance statistics produced by a vectorized backtest. */
export interface VectorizedMetrics {
// Total fractional return over the whole equity curve.
totalReturns: number;
sharpeRatio: number;
// Largest fractional decline from a running equity peak.
maxDrawdown: number;
winRate: number;
profitFactor: number;
totalTrades: number;
// Mean PnL per trade.
avgTrade: number;
// Per-trade return series.
returns: number[];
// Drawdown series aligned with `equity`.
drawdown: number[];
// Equity curve; one entry per closed trade plus the starting capital.
equity: number[];
}
// // Performance metrics for vectorized backtesting
// export interface VectorizedMetrics {
// totalReturns: number;
// sharpeRatio: number;
// maxDrawdown: number;
// winRate: number;
// profitFactor: number;
// totalTrades: number;
// avgTrade: number;
// returns: number[];
// drawdown: number[];
// equity: number[];
// }
// Vectorized backtest result
/** Full output of a single vectorized backtest run. */
export interface VectorizedBacktestResult {
metrics: VectorizedMetrics;
trades: VectorizedTrade[];
// Convenience copy of metrics.equity.
equity: number[];
// Timestamp column of the input data.
timestamps: number[];
// Signal series generated by the strategy.
signals: Record<string, number[]>;
}
// // Vectorized backtest result
// export interface VectorizedBacktestResult {
// metrics: VectorizedMetrics;
// trades: VectorizedTrade[];
// equity: number[];
// timestamps: number[];
// signals: Record<string, number[]>;
// }
/** One closed round-trip trade discovered during signal replay. */
export interface VectorizedTrade {
// Bar indices into the input data.
entryIndex: number;
exitIndex: number;
entryPrice: number;
exitPrice: number;
quantity: number;
side: 'LONG' | 'SHORT';
pnl: number;
// Fractional return relative to the entry price.
return: number;
// Exit timestamp minus entry timestamp (units follow the timestamp column).
duration: number;
}
// export interface VectorizedTrade {
// entryIndex: number;
// exitIndex: number;
// entryPrice: number;
// exitPrice: number;
// quantity: number;
// side: 'LONG' | 'SHORT';
// pnl: number;
// return: number;
// duration: number;
// }
// Vectorized strategy engine
export class VectorEngine {
private logger = getLogger('vector-engine');
private operations: Map<string, VectorOperation> = new Map();
// // Vectorized strategy engine
// export class VectorEngine {
// private logger = getLogger('vector-engine');
// private operations: Map<string, VectorOperation> = new Map();
// Pre-register the built-in arithmetic, comparison, and cross operations.
constructor() {
this.registerDefaultOperations();
}
// constructor() {
// this.registerDefaultOperations();
// }
// Install the built-in operations. Registration order: add, subtract,
// multiply, divide, greater_than, less_than, crossover, crossunder.
private registerDefaultOperations(): void {
  // Element-wise binary operation over two equal-length vectors.
  const binary = (name: string, fn: (x: number, y: number) => number): VectorOperation => ({
    name,
    inputs: ['a', 'b'],
    output: 'result',
    operation: ([a, b]) => a.map((val, i) => fn(val, b[i])),
  });

  // Arithmetic.
  this.registerOperation(binary('add', (x, y) => x + y));
  this.registerOperation(binary('subtract', (x, y) => x - y));
  this.registerOperation(binary('multiply', (x, y) => x * y));
  // Division yields NaN instead of dividing by zero.
  this.registerOperation(binary('divide', (x, y) => (y !== 0 ? x / y : NaN)));

  // Comparisons emit 1/0 flags so results stay numeric vectors.
  this.registerOperation(binary('greater_than', (x, y) => (x > y ? 1 : 0)));
  this.registerOperation(binary('less_than', (x, y) => (x < y ? 1 : 0)));

  // Cross signals: 1 at index i when series `a` crosses `b` between i-1 and i.
  const cross = (name: string, upward: boolean): VectorOperation => ({
    name,
    inputs: ['a', 'b'],
    output: 'result',
    operation: ([a, b]) => {
      const result = new Array(a.length).fill(0);
      for (let i = 1; i < a.length; i++) {
        const crossed = upward
          ? a[i] > b[i] && a[i - 1] <= b[i - 1]
          : a[i] < b[i] && a[i - 1] >= b[i - 1];
        if (crossed) {
          result[i] = 1;
        }
      }
      return result;
    },
  });

  this.registerOperation(cross('crossover', true));
  this.registerOperation(cross('crossunder', false));
}
// this.registerOperation({
// name: 'crossunder',
// inputs: ['a', 'b'],
// output: 'result',
// operation: ([a, b]) => {
// const result = new Array(a.length).fill(0);
// for (let i = 1; i < a.length; i++) {
// if (a[i] < b[i] && a[i - 1] >= b[i - 1]) {
// result[i] = 1;
// }
// }
// return result;
// },
// });
// }
// Register (or silently overwrite) a vector operation under its name.
registerOperation(operation: VectorOperation): void {
this.operations.set(operation.name, operation);
this.logger.debug(`Registered operation: ${operation.name}`);
}
// registerOperation(operation: VectorOperation): void {
// this.operations.set(operation.name, operation);
// this.logger.debug(`Registered operation: ${operation.name}`);
// }
// Execute vectorized strategy
/**
 * Run one strategy over the full dataset: build indicators, evaluate the
 * strategy code into signal vectors, replay signals into trades, and compute
 * aggregate metrics. Errors are logged and re-thrown to the caller.
 */
async executeVectorizedStrategy(
  data: DataFrame,
  strategyCode: string
): Promise<VectorizedBacktestResult> {
  try {
    const ctx = this.prepareContext(data);
    const signalSeries = this.executeStrategy(ctx, strategyCode);
    const closedTrades = this.generateTrades(data, signalSeries);
    const stats = this.calculateMetrics(data, closedTrades);

    return {
      metrics: stats,
      trades: closedTrades,
      equity: stats.equity,
      timestamps: data.getColumn('timestamp'),
      signals: signalSeries,
    };
  } catch (error) {
    this.logger.error('Vectorized strategy execution failed', error);
    throw error;
  }
}
// return {
// metrics,
// trades,
// equity: metrics.equity,
// timestamps: data.getColumn('timestamp'),
// signals,
// };
// } catch (error) {
// this.logger.error('Vectorized strategy execution failed', error);
// throw error;
// }
// }
/**
 * Build the vectorized context: the raw data plus a standard set of
 * precomputed indicator series keyed by conventional names.
 *
 * Note: the original body also read the 'high', 'low', and 'volume' columns
 * into locals that were never used; those dead reads have been removed.
 */
private prepareContext(data: DataFrame): VectorizedContext {
  const close = data.getColumn('close');

  // Calculate common indicators
  const indicators: Record<string, number[]> = {
    sma_20: sma(close, 20),
    sma_50: sma(close, 50),
    ema_12: ema(close, 12),
    ema_26: ema(close, 26),
    rsi: rsi(close),
  };

  const m = macd(close);
  indicators.macd = m.macd;
  indicators.macd_signal = m.signal;
  indicators.macd_histogram = m.histogram;

  const bb = bollingerBands(close);
  indicators.bb_upper = bb.upper;
  indicators.bb_middle = bb.middle;
  indicators.bb_lower = bb.lower;

  return {
    data,
    lookback: 100, // fixed default window; not yet configurable
    indicators,
    signals: {},
  };
}
// return {
// data,
// lookback: 100,
// indicators,
// signals: {},
// };
// }
/**
 * Evaluate the strategy code into buy/sell signal vectors (1 = fire, 0 = no).
 * This is a simplified interpreter that only recognizes named strategies via
 * substring matching; a real compiler/interpreter is needed for production.
 */
private executeStrategy(
  context: VectorizedContext,
  strategyCode: string
): Record<string, number[]> {
  const barCount = context.data.length;
  const signals: Record<string, number[]> = {
    buy: new Array(barCount).fill(0),
    sell: new Array(barCount).fill(0),
  };

  // Example: Simple moving average crossover strategy
  if (strategyCode.includes('sma_crossover')) {
    const fast = context.indicators.sma_20;
    const slow = context.indicators.sma_50;

    for (let i = 1; i < fast.length; i++) {
      // Skip bars where either SMA is still warming up (NaN).
      const defined =
        !isNaN(fast[i]) && !isNaN(slow[i]) && !isNaN(fast[i - 1]) && !isNaN(slow[i - 1]);
      if (!defined) continue;

      if (fast[i] > slow[i] && fast[i - 1] <= slow[i - 1]) {
        // Buy: fast SMA crossed above slow SMA.
        signals.buy[i] = 1;
      } else if (fast[i] < slow[i] && fast[i - 1] >= slow[i - 1]) {
        // Sell: fast SMA crossed below slow SMA.
        signals.sell[i] = 1;
      }
    }
  }

  return signals;
}
// return signals;
// }
/**
 * Replay buy/sell signal vectors into closed trades. At most one position is
 * open at a time; quantity is fixed at 1 unit (simplified model). A sell
 * signal closes a long or, when flat, opens a short; a buy signal opens a
 * long when flat or closes an existing short.
 */
private generateTrades(data: DataFrame, signals: Record<string, number[]>): VectorizedTrade[] {
  const trades: VectorizedTrade[] = [];
  const close = data.getColumn('close');
  const timestamps = data.getColumn('timestamp');

  // Currently open position, if any.
  let open: { index: number; price: number; side: 'LONG' | 'SHORT' } | null = null;

  // Close the open position at bar `exitIndex` and record the round trip.
  const closePosition = (exitIndex: number): void => {
    if (!open) return;
    const entry = open;
    const pnl =
      entry.side === 'LONG'
        ? close[exitIndex] - entry.price
        : entry.price - close[exitIndex];
    trades.push({
      entryIndex: entry.index,
      exitIndex,
      entryPrice: entry.price,
      exitPrice: close[exitIndex],
      quantity: 1,
      side: entry.side,
      pnl,
      return: pnl / entry.price,
      duration: timestamps[exitIndex] - timestamps[entry.index],
    });
    open = null;
  };

  for (let i = 0; i < close.length; i++) {
    const buySignal = signals.buy[i] === 1;
    const sellSignal = signals.sell[i] === 1;

    if (buySignal && !open) {
      // Open long position.
      open = { index: i, price: close[i], side: 'LONG' };
    } else if (sellSignal) {
      if (open && open.side === 'LONG') {
        // Close long position.
        closePosition(i);
      } else if (!open) {
        // Open short position.
        open = { index: i, price: close[i], side: 'SHORT' };
      }
    } else if (buySignal && open && open.side === 'SHORT') {
      // Close short position.
      closePosition(i);
    }
  }

  return trades;
}
// return trades;
// }
/**
 * Compute aggregate performance statistics from a list of closed trades:
 * equity curve (from a fixed 10k starting capital), drawdown series,
 * annualized Sharpe ratio (252 periods/year), win rate, and profit factor.
 * With no trades, all statistics are zero/empty.
 */
private calculateMetrics(data: DataFrame, trades: VectorizedTrade[]): VectorizedMetrics {
  if (trades.length === 0) {
    return {
      totalReturns: 0,
      sharpeRatio: 0,
      maxDrawdown: 0,
      winRate: 0,
      profitFactor: 0,
      totalTrades: 0,
      avgTrade: 0,
      returns: [],
      drawdown: [],
      equity: [],
    };
  }

  const returns = trades.map(t => t.return);
  const pnls = trades.map(t => t.pnl);

  // Equity curve: starting capital plus cumulative PnL after each trade.
  const startingCapital = 10000;
  const equity: number[] = [startingCapital];
  let runningEquity = startingCapital;
  for (const { pnl } of trades) {
    runningEquity += pnl;
    equity.push(runningEquity);
  }

  // Drawdown: fractional decline from the running equity peak.
  const drawdown: number[] = [];
  let peak = equity[0];
  for (const value of equity) {
    peak = Math.max(peak, value);
    drawdown.push((peak - value) / peak);
  }

  const totalReturns = (equity[equity.length - 1] - equity[0]) / equity[0];
  const meanReturn = returns.reduce((sum, r) => sum + r, 0) / returns.length;
  const returnStd = Math.sqrt(
    returns.reduce((sum, r) => sum + (r - meanReturn) ** 2, 0) / returns.length
  );

  const winners = trades.filter(t => t.pnl > 0);
  const losers = trades.filter(t => t.pnl < 0);
  const grossProfit = winners.reduce((sum, t) => sum + t.pnl, 0);
  const grossLoss = Math.abs(losers.reduce((sum, t) => sum + t.pnl, 0));

  return {
    totalReturns,
    sharpeRatio: returnStd !== 0 ? (meanReturn / returnStd) * Math.sqrt(252) : 0,
    maxDrawdown: Math.max(...drawdown),
    winRate: winners.length / trades.length,
    // Infinity when there are no losing trades.
    profitFactor: grossLoss !== 0 ? grossProfit / grossLoss : Infinity,
    totalTrades: trades.length,
    avgTrade: pnls.reduce((sum, pnl) => sum + pnl, 0) / trades.length,
    returns,
    drawdown,
    equity,
  };
}
// return {
// totalReturns,
// sharpeRatio: returnStd !== 0 ? (avgReturn / returnStd) * Math.sqrt(252) : 0,
// maxDrawdown: Math.max(...drawdown),
// winRate: winningTrades.length / trades.length,
// profitFactor: grossLoss !== 0 ? grossProfit / grossLoss : Infinity,
// totalTrades: trades.length,
// avgTrade: pnls.reduce((sum, pnl) => sum + pnl, 0) / trades.length,
// returns,
// drawdown,
// equity,
// };
// }
// Utility methods for vectorized operations
/**
 * Look up a registered operation by name and apply it to named input vectors.
 * Throws when the operation is unknown or a declared input is missing.
 */
applyOperation(operationName: string, inputs: Record<string, number[]>): number[] {
  const operation = this.operations.get(operationName);
  if (!operation) {
    throw new Error(`Operation '${operationName}' not found`);
  }

  // Resolve input vectors in the order the operation declares them.
  const inputArrays = operation.inputs.map(inputName => {
    const vector = inputs[inputName];
    if (!vector) {
      throw new Error(`Input '${inputName}' not provided for operation '${operationName}'`);
    }
    return vector;
  });

  return operation.operation(inputArrays);
}
// return operation.operation(inputArrays);
// }
// Batch processing for multiple strategies
/**
 * Batch processing for multiple strategies: runs a vectorized backtest for
 * each strategy in turn and collects results keyed by strategy id.
 *
 * Strategies are executed sequentially on purpose; a failure in one strategy
 * is logged and skipped so the remaining strategies still run.
 *
 * @param data - market data frame shared by all strategies
 * @param strategies - list of { id, code } strategy definitions
 * @returns map of strategy id -> backtest result (failed strategies omitted)
 */
async batchBacktest(
  data: DataFrame,
  strategies: Array<{ id: string; code: string }>
): Promise<Record<string, VectorizedBacktestResult>> {
  const results: Record<string, VectorizedBacktestResult> = {};

  for (const strategy of strategies) {
    this.logger.info(`Running vectorized backtest for strategy: ${strategy.id}`);
    try {
      results[strategy.id] = await this.executeVectorizedStrategy(data, strategy.code);
    } catch (error) {
      // Continue with other strategies
      this.logger.error(`Backtest failed for strategy: ${strategy.id}`, error);
    }
  }

  return results;
}
}
// return results;
// }
// }

View file

@ -1,8 +1,4 @@
{
  "extends": "../../tsconfig.lib.json",
  "references": [{ "path": "../logger" }, { "path": "../utils" }]
}