This commit is contained in:
Boki 2025-06-25 11:38:23 -04:00
parent 3a7254708e
commit b63e58784c
41 changed files with 5762 additions and 4477 deletions

View file

@@ -1,8 +1,8 @@
import { Queue as BullQueue, type Job } from 'bullmq';
import type { CacheProvider } from '@stock-bot/cache';
import { createCache } from '@stock-bot/cache';
import type { HandlerRegistry } from '@stock-bot/handler-registry';
import { getLogger } from '@stock-bot/logger';
import { Queue as BullQueue, type Job } from 'bullmq';
import { Queue, type QueueWorkerConfig } from './queue';
import { QueueRateLimiter } from './rate-limiter';
import { getFullQueueName, parseQueueName } from './service-utils';

View file

@@ -7,10 +7,12 @@ export function getRedisConnection(config: RedisConfig) {
const isTest = process.env.NODE_ENV === 'test' || process.env['BUNIT'] === '1';
// In test mode, always use localhost
const testConfig = isTest ? {
host: 'localhost',
port: 6379,
} : config;
const testConfig = isTest
? {
host: 'localhost',
port: 6379,
}
: config;
const baseConfig = {
host: testConfig.host,

View file

@@ -1,257 +1,311 @@
import { describe, expect, it, mock, beforeEach, type Mock } from 'bun:test';
import { processBatchJob, processItems } from '../src/batch-processor';
import type { BatchJobData, ProcessOptions, QueueManager, Queue } from '../src/types';
import type { Logger } from '@stock-bot/logger';
describe('Batch Processor', () => {
type MockLogger = {
info: Mock<(message: string, meta?: any) => void>;
error: Mock<(message: string, meta?: any) => void>;
warn: Mock<(message: string, meta?: any) => void>;
debug: Mock<(message: string, meta?: any) => void>;
trace: Mock<(message: string, meta?: any) => void>;
};
type MockQueue = {
add: Mock<(name: string, data: any, options?: any) => Promise<{ id: string }>>;
addBulk: Mock<(jobs: Array<{ name: string; data: any; opts?: any }>) => Promise<Array<{ id: string }>>>;
createChildLogger: Mock<(component: string, meta?: any) => MockLogger>;
getName: Mock<() => string>;
};
type MockQueueManager = {
getQueue: Mock<(name: string) => MockQueue>;
getCache: Mock<(name: string) => { get: Mock<(key: string) => Promise<any>>; set: Mock<(key: string, value: any, ttl?: number) => Promise<void>>; del: Mock<(key: string) => Promise<void>> }>;
};
let mockLogger: MockLogger;
let mockQueue: MockQueue;
let mockQueueManager: MockQueueManager;
let mockCache: {
get: Mock<(key: string) => Promise<any>>;
set: Mock<(key: string, value: any, ttl?: number) => Promise<void>>;
del: Mock<(key: string) => Promise<void>>;
};
beforeEach(() => {
mockLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
};
mockQueue = {
add: mock(async () => ({ id: 'job-123' })),
addBulk: mock(async (jobs) => jobs.map((_, i) => ({ id: `job-${i + 1}` }))),
createChildLogger: mock(() => mockLogger),
getName: mock(() => 'test-queue'),
};
mockCache = {
get: mock(async () => null),
set: mock(async () => {}),
del: mock(async () => {}),
};
mockQueueManager = {
getQueue: mock(() => mockQueue),
getCache: mock(() => mockCache),
};
});
describe('processBatchJob', () => {
it('should process all items successfully', async () => {
const batchData: BatchJobData = {
payloadKey: 'test-payload-key',
batchIndex: 0,
totalBatches: 1,
itemCount: 3,
totalDelayHours: 0,
};
// Mock the cached payload
const cachedPayload = {
items: ['item1', 'item2', 'item3'],
options: {
batchSize: 2,
concurrency: 1,
},
};
mockCache.get.mockImplementation(async () => cachedPayload);
const result = await processBatchJob(batchData, 'test-queue', mockQueueManager as unknown as QueueManager);
expect(mockCache.get).toHaveBeenCalledWith('test-payload-key');
expect(mockQueue.addBulk).toHaveBeenCalled();
expect(result).toBeDefined();
});
it('should handle partial failures', async () => {
const batchData: BatchJobData = {
payloadKey: 'test-payload-key',
batchIndex: 0,
totalBatches: 1,
itemCount: 3,
totalDelayHours: 0,
};
// Mock the cached payload
const cachedPayload = {
items: ['item1', 'item2', 'item3'],
options: {},
};
mockCache.get.mockImplementation(async () => cachedPayload);
// Make addBulk throw an error to simulate failure
mockQueue.addBulk.mockImplementation(async () => {
throw new Error('Failed to add jobs');
});
// processBatchJob should still complete even if addBulk fails
const result = await processBatchJob(batchData, 'test-queue', mockQueueManager as unknown as QueueManager);
expect(mockQueue.addBulk).toHaveBeenCalled();
// The error is logged in addJobsInChunks, not in processBatchJob
expect(mockLogger.error).toHaveBeenCalledWith('Failed to add job chunk', expect.any(Object));
});
it('should handle empty items', async () => {
const batchData: BatchJobData = {
payloadKey: 'test-payload-key',
batchIndex: 0,
totalBatches: 1,
itemCount: 0,
totalDelayHours: 0,
};
// Mock the cached payload with empty items
const cachedPayload = {
items: [],
options: {},
};
mockCache.get.mockImplementation(async () => cachedPayload);
const result = await processBatchJob(batchData, 'test-queue', mockQueueManager as unknown as QueueManager);
expect(mockQueue.addBulk).not.toHaveBeenCalled();
expect(result).toBeDefined();
});
it('should track duration', async () => {
const batchData: BatchJobData = {
payloadKey: 'test-payload-key',
batchIndex: 0,
totalBatches: 1,
itemCount: 1,
totalDelayHours: 0,
};
// Mock the cached payload
const cachedPayload = {
items: ['item1'],
options: {},
};
mockCache.get.mockImplementation(async () => cachedPayload);
// Add delay to queue.add
mockQueue.add.mockImplementation(() =>
new Promise(resolve => setTimeout(() => resolve({ id: 'job-1' }), 10))
);
const result = await processBatchJob(batchData, 'test-queue', mockQueueManager as unknown as QueueManager);
expect(result).toBeDefined();
// The function doesn't return duration in its result
});
});
describe('processItems', () => {
it('should process items with default options', async () => {
const items = [1, 2, 3, 4, 5];
const options: ProcessOptions = { totalDelayHours: 0 };
const result = await processItems(items, 'test-queue', options, mockQueueManager as unknown as QueueManager);
expect(result.totalItems).toBe(5);
expect(result.jobsCreated).toBe(5);
expect(result.mode).toBe('direct');
expect(mockQueue.addBulk).toHaveBeenCalled();
});
it('should process items in batches', async () => {
const items = [1, 2, 3, 4, 5];
const options: ProcessOptions = {
totalDelayHours: 0,
useBatching: true,
batchSize: 2,
};
const result = await processItems(items, 'test-queue', options, mockQueueManager as unknown as QueueManager);
expect(result.totalItems).toBe(5);
expect(result.mode).toBe('batch');
// When batching is enabled, it creates batch jobs instead of individual jobs
expect(mockQueue.addBulk).toHaveBeenCalled();
});
it('should handle concurrent processing', async () => {
const items = [1, 2, 3, 4];
const options: ProcessOptions = {
totalDelayHours: 0,
};
const result = await processItems(items, 'test-queue', options, mockQueueManager as unknown as QueueManager);
expect(result.totalItems).toBe(4);
expect(result.jobsCreated).toBe(4);
expect(mockQueue.addBulk).toHaveBeenCalled();
});
it('should handle empty array', async () => {
const items: number[] = [];
const options: ProcessOptions = { totalDelayHours: 0 };
const result = await processItems(items, 'test-queue', options, mockQueueManager as unknown as QueueManager);
expect(result.totalItems).toBe(0);
expect(result.jobsCreated).toBe(0);
expect(result.mode).toBe('direct');
expect(mockQueue.addBulk).not.toHaveBeenCalled();
});
it('should propagate errors', async () => {
const items = [1, 2, 3];
const options: ProcessOptions = { totalDelayHours: 0 };
// Make queue.addBulk throw an error
mockQueue.addBulk.mockImplementation(async () => {
throw new Error('Process error');
});
// processItems catches errors and continues, so it won't reject
const result = await processItems(items, 'test-queue', options, mockQueueManager as unknown as QueueManager);
expect(result.jobsCreated).toBe(0);
expect(mockQueue.addBulk).toHaveBeenCalled();
expect(mockLogger.error).toHaveBeenCalledWith('Failed to add job chunk', expect.any(Object));
});
it('should process large batches efficiently', async () => {
const items = Array.from({ length: 100 }, (_, i) => i);
const options: ProcessOptions = {
totalDelayHours: 0,
useBatching: true,
batchSize: 20,
};
const result = await processItems(items, 'test-queue', options, mockQueueManager as unknown as QueueManager);
expect(result.totalItems).toBe(100);
expect(result.mode).toBe('batch');
// With batching enabled and batch size 20, we should have 5 batch jobs
expect(mockQueue.addBulk).toHaveBeenCalled();
});
});
});
import { beforeEach, describe, expect, it, mock, type Mock } from 'bun:test';
import type { Logger } from '@stock-bot/logger';
import { processBatchJob, processItems } from '../src/batch-processor';
import type { BatchJobData, ProcessOptions, Queue, QueueManager } from '../src/types';
describe('Batch Processor', () => {
type MockLogger = {
info: Mock<(message: string, meta?: any) => void>;
error: Mock<(message: string, meta?: any) => void>;
warn: Mock<(message: string, meta?: any) => void>;
debug: Mock<(message: string, meta?: any) => void>;
trace: Mock<(message: string, meta?: any) => void>;
};
type MockQueue = {
add: Mock<(name: string, data: any, options?: any) => Promise<{ id: string }>>;
addBulk: Mock<
(jobs: Array<{ name: string; data: any; opts?: any }>) => Promise<Array<{ id: string }>>
>;
createChildLogger: Mock<(component: string, meta?: any) => MockLogger>;
getName: Mock<() => string>;
};
type MockQueueManager = {
getQueue: Mock<(name: string) => MockQueue>;
getCache: Mock<
(name: string) => {
get: Mock<(key: string) => Promise<any>>;
set: Mock<(key: string, value: any, ttl?: number) => Promise<void>>;
del: Mock<(key: string) => Promise<void>>;
}
>;
};
let mockLogger: MockLogger;
let mockQueue: MockQueue;
let mockQueueManager: MockQueueManager;
let mockCache: {
get: Mock<(key: string) => Promise<any>>;
set: Mock<(key: string, value: any, ttl?: number) => Promise<void>>;
del: Mock<(key: string) => Promise<void>>;
};
beforeEach(() => {
mockLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
};
mockQueue = {
add: mock(async () => ({ id: 'job-123' })),
addBulk: mock(async jobs => jobs.map((_, i) => ({ id: `job-${i + 1}` }))),
createChildLogger: mock(() => mockLogger),
getName: mock(() => 'test-queue'),
};
mockCache = {
get: mock(async () => null),
set: mock(async () => {}),
del: mock(async () => {}),
};
mockQueueManager = {
getQueue: mock(() => mockQueue),
getCache: mock(() => mockCache),
};
});
describe('processBatchJob', () => {
it('should process all items successfully', async () => {
const batchData: BatchJobData = {
payloadKey: 'test-payload-key',
batchIndex: 0,
totalBatches: 1,
itemCount: 3,
totalDelayHours: 0,
};
// Mock the cached payload
const cachedPayload = {
items: ['item1', 'item2', 'item3'],
options: {
batchSize: 2,
concurrency: 1,
},
};
mockCache.get.mockImplementation(async () => cachedPayload);
const result = await processBatchJob(
batchData,
'test-queue',
mockQueueManager as unknown as QueueManager
);
expect(mockCache.get).toHaveBeenCalledWith('test-payload-key');
expect(mockQueue.addBulk).toHaveBeenCalled();
expect(result).toBeDefined();
});
it('should handle partial failures', async () => {
const batchData: BatchJobData = {
payloadKey: 'test-payload-key',
batchIndex: 0,
totalBatches: 1,
itemCount: 3,
totalDelayHours: 0,
};
// Mock the cached payload
const cachedPayload = {
items: ['item1', 'item2', 'item3'],
options: {},
};
mockCache.get.mockImplementation(async () => cachedPayload);
// Make addBulk throw an error to simulate failure
mockQueue.addBulk.mockImplementation(async () => {
throw new Error('Failed to add jobs');
});
// processBatchJob should still complete even if addBulk fails
const result = await processBatchJob(
batchData,
'test-queue',
mockQueueManager as unknown as QueueManager
);
expect(mockQueue.addBulk).toHaveBeenCalled();
// The error is logged in addJobsInChunks, not in processBatchJob
expect(mockLogger.error).toHaveBeenCalledWith('Failed to add job chunk', expect.any(Object));
});
it('should handle empty items', async () => {
const batchData: BatchJobData = {
payloadKey: 'test-payload-key',
batchIndex: 0,
totalBatches: 1,
itemCount: 0,
totalDelayHours: 0,
};
// Mock the cached payload with empty items
const cachedPayload = {
items: [],
options: {},
};
mockCache.get.mockImplementation(async () => cachedPayload);
const result = await processBatchJob(
batchData,
'test-queue',
mockQueueManager as unknown as QueueManager
);
expect(mockQueue.addBulk).not.toHaveBeenCalled();
expect(result).toBeDefined();
});
it('should track duration', async () => {
const batchData: BatchJobData = {
payloadKey: 'test-payload-key',
batchIndex: 0,
totalBatches: 1,
itemCount: 1,
totalDelayHours: 0,
};
// Mock the cached payload
const cachedPayload = {
items: ['item1'],
options: {},
};
mockCache.get.mockImplementation(async () => cachedPayload);
// Add delay to queue.add
mockQueue.add.mockImplementation(
() => new Promise(resolve => setTimeout(() => resolve({ id: 'job-1' }), 10))
);
const result = await processBatchJob(
batchData,
'test-queue',
mockQueueManager as unknown as QueueManager
);
expect(result).toBeDefined();
// The function doesn't return duration in its result
});
});
describe('processItems', () => {
it('should process items with default options', async () => {
const items = [1, 2, 3, 4, 5];
const options: ProcessOptions = { totalDelayHours: 0 };
const result = await processItems(
items,
'test-queue',
options,
mockQueueManager as unknown as QueueManager
);
expect(result.totalItems).toBe(5);
expect(result.jobsCreated).toBe(5);
expect(result.mode).toBe('direct');
expect(mockQueue.addBulk).toHaveBeenCalled();
});
it('should process items in batches', async () => {
const items = [1, 2, 3, 4, 5];
const options: ProcessOptions = {
totalDelayHours: 0,
useBatching: true,
batchSize: 2,
};
const result = await processItems(
items,
'test-queue',
options,
mockQueueManager as unknown as QueueManager
);
expect(result.totalItems).toBe(5);
expect(result.mode).toBe('batch');
// When batching is enabled, it creates batch jobs instead of individual jobs
expect(mockQueue.addBulk).toHaveBeenCalled();
});
it('should handle concurrent processing', async () => {
const items = [1, 2, 3, 4];
const options: ProcessOptions = {
totalDelayHours: 0,
};
const result = await processItems(
items,
'test-queue',
options,
mockQueueManager as unknown as QueueManager
);
expect(result.totalItems).toBe(4);
expect(result.jobsCreated).toBe(4);
expect(mockQueue.addBulk).toHaveBeenCalled();
});
it('should handle empty array', async () => {
const items: number[] = [];
const options: ProcessOptions = { totalDelayHours: 0 };
const result = await processItems(
items,
'test-queue',
options,
mockQueueManager as unknown as QueueManager
);
expect(result.totalItems).toBe(0);
expect(result.jobsCreated).toBe(0);
expect(result.mode).toBe('direct');
expect(mockQueue.addBulk).not.toHaveBeenCalled();
});
it('should propagate errors', async () => {
const items = [1, 2, 3];
const options: ProcessOptions = { totalDelayHours: 0 };
// Make queue.addBulk throw an error
mockQueue.addBulk.mockImplementation(async () => {
throw new Error('Process error');
});
// processItems catches errors and continues, so it won't reject
const result = await processItems(
items,
'test-queue',
options,
mockQueueManager as unknown as QueueManager
);
expect(result.jobsCreated).toBe(0);
expect(mockQueue.addBulk).toHaveBeenCalled();
expect(mockLogger.error).toHaveBeenCalledWith('Failed to add job chunk', expect.any(Object));
});
it('should process large batches efficiently', async () => {
const items = Array.from({ length: 100 }, (_, i) => i);
const options: ProcessOptions = {
totalDelayHours: 0,
useBatching: true,
batchSize: 20,
};
const result = await processItems(
items,
'test-queue',
options,
mockQueueManager as unknown as QueueManager
);
expect(result.totalItems).toBe(100);
expect(result.mode).toBe('batch');
// With batching enabled and batch size 20, we should have 5 batch jobs
expect(mockQueue.addBulk).toHaveBeenCalled();
});
});
});

View file

@@ -1,6 +1,6 @@
import type { Job, Queue } from 'bullmq';
import { beforeEach, describe, expect, it, mock } from 'bun:test';
import { DeadLetterQueueHandler } from '../src/dlq-handler';
import type { Job, Queue } from 'bullmq';
import type { RedisConfig } from '../src/types';
describe('DeadLetterQueueHandler', () => {
@@ -275,4 +275,4 @@ describe('DeadLetterQueueHandler', () => {
expect(mockClose).toHaveBeenCalled();
});
});
})
});

View file

@@ -1,125 +1,125 @@
import { beforeEach, describe, expect, it, mock } from 'bun:test';
import { Queue } from '../src/queue';
import type { RedisConfig, JobData, QueueWorkerConfig } from '../src/types';
describe('Queue Class', () => {
const mockRedisConfig: RedisConfig = {
host: 'localhost',
port: 6379,
};
describe('basic functionality', () => {
it('should create queue with minimal config', () => {
const queue = new Queue('test-queue', mockRedisConfig);
expect(queue).toBeDefined();
expect(queue.getName()).toBe('test-queue');
});
it('should create queue with default job options', () => {
const defaultJobOptions = {
attempts: 5,
backoff: { type: 'exponential' as const, delay: 2000 },
};
const queue = new Queue('test-queue', mockRedisConfig, defaultJobOptions);
expect(queue).toBeDefined();
expect(queue.getName()).toBe('test-queue');
});
it('should create queue with custom logger', () => {
const mockLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
};
const queue = new Queue('test-queue', mockRedisConfig, {}, {}, mockLogger);
expect(queue).toBeDefined();
});
it('should create queue with worker config', () => {
const workerConfig: QueueWorkerConfig = {
workers: 2,
concurrency: 5,
startWorker: false, // Don't actually start workers
serviceName: 'test-service',
};
const queue = new Queue('test-queue', mockRedisConfig, {}, workerConfig);
expect(queue).toBeDefined();
});
});
describe('queue naming and utilities', () => {
it('should return queue name', () => {
const queue = new Queue('my-test-queue', mockRedisConfig);
expect(queue.getName()).toBe('my-test-queue');
});
it('should get bull queue instance', () => {
const queue = new Queue('test-queue', mockRedisConfig);
const bullQueue = queue.getBullQueue();
expect(bullQueue).toBeDefined();
});
it('should create child logger with logger that supports child', () => {
const mockChildLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
};
const mockLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
child: mock(() => mockChildLogger),
};
const queue = new Queue('test-queue', mockRedisConfig, {}, {}, mockLogger);
const childLogger = queue.createChildLogger('batch', { batchId: '123' });
expect(childLogger).toBe(mockChildLogger);
expect(mockLogger.child).toHaveBeenCalledWith('batch', { batchId: '123' });
});
it('should fallback to main logger if child not supported', () => {
const mockLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
};
const queue = new Queue('test-queue', mockRedisConfig, {}, {}, mockLogger);
const childLogger = queue.createChildLogger('batch', { batchId: '123' });
expect(childLogger).toBe(mockLogger);
});
});
describe('worker count methods', () => {
it('should get worker count when no workers', () => {
const queue = new Queue('test-queue', mockRedisConfig);
expect(queue.getWorkerCount()).toBe(0);
});
it('should handle worker count with workers config', () => {
const workerConfig: QueueWorkerConfig = {
workers: 3,
startWorker: false, // Don't actually start
};
const queue = new Queue('test-queue', mockRedisConfig, {}, workerConfig);
// Workers aren't actually started with startWorker: false
expect(queue.getWorkerCount()).toBe(0);
});
});
});
import { beforeEach, describe, expect, it, mock } from 'bun:test';
import { Queue } from '../src/queue';
import type { JobData, QueueWorkerConfig, RedisConfig } from '../src/types';
describe('Queue Class', () => {
const mockRedisConfig: RedisConfig = {
host: 'localhost',
port: 6379,
};
describe('basic functionality', () => {
it('should create queue with minimal config', () => {
const queue = new Queue('test-queue', mockRedisConfig);
expect(queue).toBeDefined();
expect(queue.getName()).toBe('test-queue');
});
it('should create queue with default job options', () => {
const defaultJobOptions = {
attempts: 5,
backoff: { type: 'exponential' as const, delay: 2000 },
};
const queue = new Queue('test-queue', mockRedisConfig, defaultJobOptions);
expect(queue).toBeDefined();
expect(queue.getName()).toBe('test-queue');
});
it('should create queue with custom logger', () => {
const mockLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
};
const queue = new Queue('test-queue', mockRedisConfig, {}, {}, mockLogger);
expect(queue).toBeDefined();
});
it('should create queue with worker config', () => {
const workerConfig: QueueWorkerConfig = {
workers: 2,
concurrency: 5,
startWorker: false, // Don't actually start workers
serviceName: 'test-service',
};
const queue = new Queue('test-queue', mockRedisConfig, {}, workerConfig);
expect(queue).toBeDefined();
});
});
describe('queue naming and utilities', () => {
it('should return queue name', () => {
const queue = new Queue('my-test-queue', mockRedisConfig);
expect(queue.getName()).toBe('my-test-queue');
});
it('should get bull queue instance', () => {
const queue = new Queue('test-queue', mockRedisConfig);
const bullQueue = queue.getBullQueue();
expect(bullQueue).toBeDefined();
});
it('should create child logger with logger that supports child', () => {
const mockChildLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
};
const mockLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
child: mock(() => mockChildLogger),
};
const queue = new Queue('test-queue', mockRedisConfig, {}, {}, mockLogger);
const childLogger = queue.createChildLogger('batch', { batchId: '123' });
expect(childLogger).toBe(mockChildLogger);
expect(mockLogger.child).toHaveBeenCalledWith('batch', { batchId: '123' });
});
it('should fallback to main logger if child not supported', () => {
const mockLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
};
const queue = new Queue('test-queue', mockRedisConfig, {}, {}, mockLogger);
const childLogger = queue.createChildLogger('batch', { batchId: '123' });
expect(childLogger).toBe(mockLogger);
});
});
describe('worker count methods', () => {
it('should get worker count when no workers', () => {
const queue = new Queue('test-queue', mockRedisConfig);
expect(queue.getWorkerCount()).toBe(0);
});
it('should handle worker count with workers config', () => {
const workerConfig: QueueWorkerConfig = {
workers: 3,
startWorker: false, // Don't actually start
};
const queue = new Queue('test-queue', mockRedisConfig, {}, workerConfig);
// Workers aren't actually started with startWorker: false
expect(queue.getWorkerCount()).toBe(0);
});
});
});

View file

@@ -1,232 +1,244 @@
import { beforeEach, describe, expect, it, mock } from 'bun:test';
import { QueueManager } from '../src/queue-manager';
import type { RedisConfig, QueueManagerConfig } from '../src/types';
describe.skip('QueueManager', () => {
// Skipping these tests as they require real Redis connection
// TODO: Create mock implementation or use testcontainers
const mockRedisConfig: RedisConfig = {
host: 'localhost',
port: 6379,
};
const mockLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
};
describe('constructor', () => {
it('should create queue manager with default config', () => {
const manager = new QueueManager(mockRedisConfig);
expect(manager).toBeDefined();
});
it('should create queue manager with custom config', () => {
const config: QueueManagerConfig = {
defaultJobOptions: {
attempts: 5,
removeOnComplete: 50,
},
enableMetrics: true,
enableScheduler: true,
};
const manager = new QueueManager(mockRedisConfig, config, mockLogger);
expect(manager).toBeDefined();
});
});
describe('queue operations', () => {
let manager: QueueManager;
beforeEach(() => {
manager = new QueueManager(mockRedisConfig, {}, mockLogger);
});
it('should create or get queue', () => {
const queue = manager.createQueue('test-queue');
expect(queue).toBeDefined();
expect(queue.getName()).toBe('test-queue');
});
it('should return same queue instance', () => {
const queue1 = manager.createQueue('test-queue');
const queue2 = manager.createQueue('test-queue');
expect(queue1).toBe(queue2);
});
it('should create queue with options', () => {
const queue = manager.createQueue('test-queue', {
concurrency: 5,
workers: 2,
});
expect(queue).toBeDefined();
});
it('should get existing queue', () => {
manager.createQueue('test-queue');
const queue = manager.getQueue('test-queue');
expect(queue).toBeDefined();
});
it('should return undefined for non-existent queue', () => {
const queue = manager.getQueue('non-existent');
expect(queue).toBeUndefined();
});
it('should list all queues', () => {
manager.createQueue('queue1');
manager.createQueue('queue2');
const queues = manager.getQueues();
expect(queues).toHaveLength(2);
expect(queues.map(q => q.getName())).toContain('queue1');
expect(queues.map(q => q.getName())).toContain('queue2');
});
it('should check if queue exists', () => {
manager.createQueue('test-queue');
expect(manager.hasQueue('test-queue')).toBe(true);
expect(manager.hasQueue('non-existent')).toBe(false);
});
});
describe('cache operations', () => {
let manager: QueueManager;
beforeEach(() => {
manager = new QueueManager(mockRedisConfig, {}, mockLogger);
});
it('should create cache', () => {
const cache = manager.createCache('test-cache');
expect(cache).toBeDefined();
});
it('should get existing cache', () => {
manager.createCache('test-cache');
const cache = manager.getCache('test-cache');
expect(cache).toBeDefined();
});
it('should return same cache instance', () => {
const cache1 = manager.createCache('test-cache');
const cache2 = manager.createCache('test-cache');
expect(cache1).toBe(cache2);
});
});
describe('service discovery', () => {
let manager: QueueManager;
beforeEach(() => {
manager = new QueueManager(mockRedisConfig, {}, mockLogger);
});
it('should configure service name', () => {
manager.configureService('test-service');
expect((manager as any).serviceName).toBe('test-service');
});
it('should register queue route', () => {
manager.configureService('test-service');
manager.registerQueueRoute({
service: 'remote-service',
handler: 'process',
queueName: '{remote-service_process}',
});
expect(manager.hasRoute('remote-service', 'process')).toBe(true);
});
it('should send to remote queue', async () => {
manager.configureService('test-service');
manager.registerQueueRoute({
service: 'remote-service',
handler: 'process',
queueName: '{remote-service_process}',
});
const jobId = await manager.sendToQueue('remote-service', 'process', { data: 'test' });
expect(jobId).toBeDefined();
});
it('should send to local queue', async () => {
manager.configureService('test-service');
manager.createQueue('{test-service_process}');
const jobId = await manager.sendToQueue('test-service', 'process', { data: 'test' });
expect(jobId).toBeDefined();
});
});
describe('shutdown', () => {
it('should shutdown gracefully', async () => {
const manager = new QueueManager(mockRedisConfig, {}, mockLogger);
manager.createQueue('test-queue');
await manager.shutdown();
expect((manager as any).isShuttingDown).toBe(true);
});
it('should handle multiple shutdown calls', async () => {
const manager = new QueueManager(mockRedisConfig, {}, mockLogger);
const promise1 = manager.shutdown();
const promise2 = manager.shutdown();
expect(promise1).toBe(promise2);
await promise1;
});
});
describe('metrics', () => {
it('should get global stats', async () => {
const manager = new QueueManager(mockRedisConfig, {
enableMetrics: true,
}, mockLogger);
manager.createQueue('queue1');
manager.createQueue('queue2');
const stats = await manager.getGlobalStats();
expect(stats).toBeDefined();
expect(stats.totalQueues).toBe(2);
});
it('should get queue stats', async () => {
const manager = new QueueManager(mockRedisConfig, {
enableMetrics: true,
}, mockLogger);
const queue = manager.createQueue('test-queue');
const stats = await manager.getQueueStats('test-queue');
expect(stats).toBeDefined();
expect(stats.name).toBe('test-queue');
});
});
describe('rate limiting', () => {
it('should apply rate limit rules', () => {
const manager = new QueueManager(mockRedisConfig, {
rateLimiter: {
rules: [
{
name: 'api-limit',
max: 100,
duration: 60000,
scope: 'global',
},
],
},
}, mockLogger);
const rateLimiter = (manager as any).rateLimiter;
expect(rateLimiter).toBeDefined();
});
});
});
import { beforeEach, describe, expect, it, mock } from 'bun:test';
import { QueueManager } from '../src/queue-manager';
import type { QueueManagerConfig, RedisConfig } from '../src/types';
describe.skip('QueueManager', () => {
// Skipping these tests as they require real Redis connection
// TODO: Create mock implementation or use testcontainers
const mockRedisConfig: RedisConfig = {
host: 'localhost',
port: 6379,
};
const mockLogger = {
info: mock(() => {}),
error: mock(() => {}),
warn: mock(() => {}),
debug: mock(() => {}),
trace: mock(() => {}),
};
describe('constructor', () => {
it('should create queue manager with default config', () => {
const manager = new QueueManager(mockRedisConfig);
expect(manager).toBeDefined();
});
it('should create queue manager with custom config', () => {
const config: QueueManagerConfig = {
defaultJobOptions: {
attempts: 5,
removeOnComplete: 50,
},
enableMetrics: true,
enableScheduler: true,
};
const manager = new QueueManager(mockRedisConfig, config, mockLogger);
expect(manager).toBeDefined();
});
});
describe('queue operations', () => {
let manager: QueueManager;
beforeEach(() => {
manager = new QueueManager(mockRedisConfig, {}, mockLogger);
});
it('should create or get queue', () => {
const queue = manager.createQueue('test-queue');
expect(queue).toBeDefined();
expect(queue.getName()).toBe('test-queue');
});
it('should return same queue instance', () => {
const queue1 = manager.createQueue('test-queue');
const queue2 = manager.createQueue('test-queue');
expect(queue1).toBe(queue2);
});
it('should create queue with options', () => {
const queue = manager.createQueue('test-queue', {
concurrency: 5,
workers: 2,
});
expect(queue).toBeDefined();
});
it('should get existing queue', () => {
manager.createQueue('test-queue');
const queue = manager.getQueue('test-queue');
expect(queue).toBeDefined();
});
it('should return undefined for non-existent queue', () => {
const queue = manager.getQueue('non-existent');
expect(queue).toBeUndefined();
});
it('should list all queues', () => {
manager.createQueue('queue1');
manager.createQueue('queue2');
const queues = manager.getQueues();
expect(queues).toHaveLength(2);
expect(queues.map(q => q.getName())).toContain('queue1');
expect(queues.map(q => q.getName())).toContain('queue2');
});
it('should check if queue exists', () => {
manager.createQueue('test-queue');
expect(manager.hasQueue('test-queue')).toBe(true);
expect(manager.hasQueue('non-existent')).toBe(false);
});
});
describe('cache operations', () => {
  // A new manager per test keeps the cache registry isolated.
  let manager: QueueManager;

  beforeEach(() => {
    manager = new QueueManager(mockRedisConfig, {}, mockLogger);
  });

  it('should create cache', () => {
    expect(manager.createCache('test-cache')).toBeDefined();
  });

  it('should get existing cache', () => {
    manager.createCache('test-cache');
    expect(manager.getCache('test-cache')).toBeDefined();
  });

  it('should return same cache instance', () => {
    // Repeated creation under the same name must return the cached instance.
    const first = manager.createCache('test-cache');
    const second = manager.createCache('test-cache');
    expect(second).toBe(first);
  });
});
// Service discovery: a manager configured with its own service name can send
// jobs either to locally-created queues or to registered remote queue routes.
describe('service discovery', () => {
  let manager: QueueManager;
  beforeEach(() => {
    manager = new QueueManager(mockRedisConfig, {}, mockLogger);
  });
  it('should configure service name', () => {
    manager.configureService('test-service');
    // Private-field peek: configureService exposes no public getter here.
    expect((manager as any).serviceName).toBe('test-service');
  });
  it('should register queue route', () => {
    manager.configureService('test-service');
    manager.registerQueueRoute({
      service: 'remote-service',
      handler: 'process',
      queueName: '{remote-service_process}',
    });
    expect(manager.hasRoute('remote-service', 'process')).toBe(true);
  });
  it('should send to remote queue', async () => {
    manager.configureService('test-service');
    manager.registerQueueRoute({
      service: 'remote-service',
      handler: 'process',
      queueName: '{remote-service_process}',
    });
    // Only the returned job id is asserted; actual delivery is not verified.
    const jobId = await manager.sendToQueue('remote-service', 'process', { data: 'test' });
    expect(jobId).toBeDefined();
  });
  it('should send to local queue', async () => {
    manager.configureService('test-service');
    // Queue name matches the {service_handler} convention used by routing.
    manager.createQueue('{test-service_process}');
    const jobId = await manager.sendToQueue('test-service', 'process', { data: 'test' });
    expect(jobId).toBeDefined();
  });
});
describe('shutdown', () => {
  it('should shutdown gracefully', async () => {
    const manager = new QueueManager(mockRedisConfig, {}, mockLogger);
    manager.createQueue('test-queue');
    await manager.shutdown();
    // Internal flag flips once shutdown has begun.
    expect((manager as any).isShuttingDown).toBe(true);
  });

  it('should handle multiple shutdown calls', async () => {
    const manager = new QueueManager(mockRedisConfig, {}, mockLogger);
    const firstCall = manager.shutdown();
    const secondCall = manager.shutdown();
    // Concurrent callers must share the same in-flight promise.
    expect(firstCall).toBe(secondCall);
    await firstCall;
  });
});
// Metrics: managers created with enableMetrics expose per-queue and global
// stat snapshots.
describe('metrics', () => {
  it('should get global stats', async () => {
    const manager = new QueueManager(
      mockRedisConfig,
      {
        enableMetrics: true,
      },
      mockLogger
    );
    manager.createQueue('queue1');
    manager.createQueue('queue2');
    const stats = await manager.getGlobalStats();
    expect(stats).toBeDefined();
    // Both queues registered above must be counted.
    expect(stats.totalQueues).toBe(2);
  });
  it('should get queue stats', async () => {
    const manager = new QueueManager(
      mockRedisConfig,
      {
        enableMetrics: true,
      },
      mockLogger
    );
    // Register the queue for its side effect; the original bound the return
    // value to an unused local.
    manager.createQueue('test-queue');
    const stats = await manager.getQueueStats('test-queue');
    expect(stats).toBeDefined();
    expect(stats.name).toBe('test-queue');
  });
});
// Rate-limiter wiring: a rule set passed through QueueManagerConfig should be
// materialized into an internal rate limiter instance on construction.
describe('rate limiting', () => {
  it('should apply rate limit rules', () => {
    const manager = new QueueManager(
      mockRedisConfig,
      {
        rateLimiter: {
          rules: [
            {
              name: 'api-limit',
              max: 100,
              duration: 60000,
              scope: 'global',
            },
          ],
        },
      },
      mockLogger
    );
    // Private-field access: only construction is verified, not limiting itself.
    const rateLimiter = (manager as any).rateLimiter;
    expect(rateLimiter).toBeDefined();
  });
});
});

View file

@ -1,6 +1,6 @@
import type { Job, Queue, QueueEvents } from 'bullmq';
import { beforeEach, describe, expect, it, mock, type Mock } from 'bun:test';
import { QueueMetricsCollector } from '../src/queue-metrics';
import type { Queue, QueueEvents, Job } from 'bullmq';
describe('QueueMetricsCollector', () => {
let metrics: QueueMetricsCollector;
@ -34,7 +34,10 @@ describe('QueueMetricsCollector', () => {
on: mock(() => {}),
};
metrics = new QueueMetricsCollector(mockQueue as unknown as Queue, mockQueueEvents as unknown as QueueEvents);
metrics = new QueueMetricsCollector(
mockQueue as unknown as Queue,
mockQueueEvents as unknown as QueueEvents
);
});
describe('collect metrics', () => {
@ -46,7 +49,9 @@ describe('QueueMetricsCollector', () => {
mockQueue.getDelayedCount.mockImplementation(() => Promise.resolve(1));
// Add some completed timestamps to avoid 100% failure rate
const completedHandler = mockQueueEvents.on.mock.calls.find(call => call[0] === 'completed')?.[1];
const completedHandler = mockQueueEvents.on.mock.calls.find(
call => call[0] === 'completed'
)?.[1];
if (completedHandler) {
for (let i = 0; i < 50; i++) {
completedHandler();
@ -118,17 +123,14 @@ describe('QueueMetricsCollector', () => {
completedTimestamps: number[];
failedTimestamps: number[];
};
const now = Date.now();
metricsWithPrivate.completedTimestamps = [
now - 30000, // 30 seconds ago
now - 20000,
now - 10000,
];
metricsWithPrivate.failedTimestamps = [
now - 25000,
now - 5000,
];
metricsWithPrivate.failedTimestamps = [now - 25000, now - 5000];
const result = await metrics.collect();
@ -146,7 +148,9 @@ describe('QueueMetricsCollector', () => {
mockQueue.getFailedCount.mockImplementation(() => Promise.resolve(3));
// Add some completed timestamps to make it healthy
const completedHandler = mockQueueEvents.on.mock.calls.find(call => call[0] === 'completed')?.[1];
const completedHandler = mockQueueEvents.on.mock.calls.find(
call => call[0] === 'completed'
)?.[1];
if (completedHandler) {
for (let i = 0; i < 50; i++) {
completedHandler();
@ -174,9 +178,13 @@ describe('QueueMetricsCollector', () => {
const prometheusMetrics = await metrics.getPrometheusMetrics();
expect(prometheusMetrics).toContain('# HELP queue_jobs_total');
expect(prometheusMetrics).toContain('queue_jobs_total{queue="test-queue",status="waiting"} 5');
expect(prometheusMetrics).toContain(
'queue_jobs_total{queue="test-queue",status="waiting"} 5'
);
expect(prometheusMetrics).toContain('queue_jobs_total{queue="test-queue",status="active"} 2');
expect(prometheusMetrics).toContain('queue_jobs_total{queue="test-queue",status="completed"} 100');
expect(prometheusMetrics).toContain(
'queue_jobs_total{queue="test-queue",status="completed"} 100'
);
expect(prometheusMetrics).toContain('# HELP queue_processing_time_seconds');
expect(prometheusMetrics).toContain('# HELP queue_throughput_per_minute');
expect(prometheusMetrics).toContain('# HELP queue_health');
@ -189,7 +197,10 @@ describe('QueueMetricsCollector', () => {
on: mock<(event: string, handler: Function) => void>(() => {}),
};
new QueueMetricsCollector(mockQueue as unknown as Queue, newMockQueueEvents as unknown as QueueEvents);
new QueueMetricsCollector(
mockQueue as unknown as Queue,
newMockQueueEvents as unknown as QueueEvents
);
expect(newMockQueueEvents.on).toHaveBeenCalledWith('completed', expect.any(Function));
expect(newMockQueueEvents.on).toHaveBeenCalledWith('failed', expect.any(Function));
@ -219,4 +230,4 @@ describe('QueueMetricsCollector', () => {
expect(result.oldestWaitingJob).toBeNull();
});
});
});
});

View file

@ -1,203 +1,203 @@
import { beforeEach, describe, expect, it, mock } from 'bun:test';
import {
normalizeServiceName,
generateCachePrefix,
getFullQueueName,
parseQueueName,
} from '../src/service-utils';
import { ServiceCache, createServiceCache } from '../src/service-cache';
import type { BatchJobData } from '../src/types';
// Pure-function tests for the service-utils helpers: name normalization,
// cache-prefix generation, and {service_handler} queue-name round-tripping.
describe('Service Utilities', () => {
  describe('normalizeServiceName', () => {
    it('should normalize service names', () => {
      // camelCase/PascalCase collapse to kebab-case; existing kebab-case and
      // all-caps inputs are lowercased without extra dashes.
      expect(normalizeServiceName('MyService')).toBe('my-service');
      expect(normalizeServiceName('webApi')).toBe('web-api');
      expect(normalizeServiceName('dataIngestion')).toBe('data-ingestion');
      expect(normalizeServiceName('data-pipeline')).toBe('data-pipeline');
      expect(normalizeServiceName('UPPERCASE')).toBe('uppercase');
    });
    it('should handle empty string', () => {
      expect(normalizeServiceName('')).toBe('');
    });
    it('should handle special characters', () => {
      // The function only handles camelCase, not special characters
      expect(normalizeServiceName('my@service#123')).toBe('my@service#123');
      expect(normalizeServiceName('serviceWithCamelCase')).toBe('service-with-camel-case');
    });
  });
  describe('generateCachePrefix', () => {
    it('should generate cache prefix', () => {
      // Prefix is "cache:" plus the normalized service name.
      expect(generateCachePrefix('service')).toBe('cache:service');
      expect(generateCachePrefix('webApi')).toBe('cache:web-api');
    });
    it('should handle empty parts', () => {
      expect(generateCachePrefix('')).toBe('cache:');
    });
  });
  describe('getFullQueueName', () => {
    it('should generate full queue name', () => {
      // Braces make the name a Redis Cluster hash tag: {service_handler}.
      expect(getFullQueueName('service', 'handler')).toBe('{service_handler}');
      expect(getFullQueueName('webApi', 'handler')).toBe('{web-api_handler}');
    });
    it('should normalize service name', () => {
      expect(getFullQueueName('MyService', 'handler')).toBe('{my-service_handler}');
    });
  });
  describe('parseQueueName', () => {
    it('should parse queue name', () => {
      expect(parseQueueName('{service_handler}')).toEqual({
        service: 'service',
        handler: 'handler',
      });
      expect(parseQueueName('{web-api_data-processor}')).toEqual({
        service: 'web-api',
        handler: 'data-processor',
      });
    });
    it('should handle invalid formats', () => {
      // Anything not matching {service_handler} parses to null.
      expect(parseQueueName('service:handler')).toBeNull();
      expect(parseQueueName('service')).toBeNull();
      expect(parseQueueName('')).toBeNull();
    });
    it('should handle edge cases', () => {
      expect(parseQueueName('{}_handler')).toBeNull();
      expect(parseQueueName('{service_}')).toBeNull();
      expect(parseQueueName('not-a-valid-format')).toBeNull();
    });
  });
});
// ServiceCache construction and key-prefixing. The cache is created for real
// (createCache is not mocked), so only construction and getKey are asserted.
describe('ServiceCache', () => {
  it('should create service cache', () => {
    const mockRedisConfig = {
      host: 'localhost',
      port: 6379,
    };
    // Since ServiceCache constructor internally creates a real cache,
    // we can't easily test it without mocking the createCache function
    // For now, just test that the function exists and returns something
    const serviceCache = createServiceCache('myservice', mockRedisConfig);
    expect(serviceCache).toBeDefined();
    expect(serviceCache).toBeInstanceOf(ServiceCache);
  });
  it('should handle cache prefix correctly', () => {
    const mockRedisConfig = {
      host: 'localhost',
      port: 6379,
    };
    const serviceCache = createServiceCache('webApi', mockRedisConfig);
    expect(serviceCache).toBeDefined();
    // The prefix is set internally as cache:web-api
    expect(serviceCache.getKey('test')).toBe('cache:web-api:test');
  });
  it('should support global cache option', () => {
    const mockRedisConfig = {
      host: 'localhost',
      port: 6379,
    };
    // { global: true } switches to the cross-service shared namespace.
    const globalCache = createServiceCache('myservice', mockRedisConfig, { global: true });
    expect(globalCache).toBeDefined();
    // Global cache uses a different prefix
    expect(globalCache.getKey('test')).toBe('stock-bot:shared:test');
  });
});
// Type-level smoke tests: exercises the BatchJobData shape and the expected
// invariants of a batch result summary. No batch processor is invoked.
describe('Batch Processing', () => {
  it('should handle batch job data types', () => {
    const batchJob: BatchJobData = {
      items: [1, 2, 3],
      options: {
        batchSize: 10,
        concurrency: 2,
      },
    };
    expect(batchJob.items).toHaveLength(3);
    expect(batchJob.options.batchSize).toBe(10);
    expect(batchJob.options.concurrency).toBe(2);
  });
  it('should process batch results', () => {
    const results = {
      totalItems: 10,
      successful: 8,
      failed: 2,
      errors: [
        { item: 5, error: 'Failed to process' },
        { item: 7, error: 'Invalid data' },
      ],
      duration: 1000,
    };
    // Successes + failures must account for every item; one error per failure.
    expect(results.successful + results.failed).toBe(results.totalItems);
    expect(results.errors).toHaveLength(results.failed);
  });
});
// Config-shape test only: validates a rate-limit rules literal; no limiter is
// constructed here.
describe('Rate Limiting', () => {
  it('should validate rate limit config', () => {
    const config = {
      rules: [
        {
          name: 'default',
          maxJobs: 100,
          window: 60000,
        },
        {
          name: 'api',
          maxJobs: 10,
          window: 1000,
        },
      ],
    };
    expect(config.rules).toHaveLength(2);
    expect(config.rules[0].name).toBe('default');
    expect(config.rules[1].maxJobs).toBe(10);
  });
});
// Shape checks for job payloads and queue stats objects — plain literals,
// nothing is enqueued.
describe('Queue Types', () => {
  it('should validate job data structure', () => {
    const jobData = {
      handler: 'TestHandler',
      operation: 'process',
      payload: { data: 'test' },
    };
    expect(jobData.handler).toBe('TestHandler');
    expect(jobData.operation).toBe('process');
    expect(jobData.payload).toBeDefined();
  });
  it('should validate queue stats structure', () => {
    const stats = {
      waiting: 10,
      active: 2,
      completed: 100,
      failed: 5,
      delayed: 3,
      paused: false,
      workers: 4,
    };
    // 10 + 2 + 100 + 5 + 3 = 120
    expect(stats.waiting + stats.active + stats.completed + stats.failed + stats.delayed).toBe(120);
    expect(stats.paused).toBe(false);
    expect(stats.workers).toBe(4);
  });
});
import { beforeEach, describe, expect, it, mock } from 'bun:test';
import { createServiceCache, ServiceCache } from '../src/service-cache';
import {
generateCachePrefix,
getFullQueueName,
normalizeServiceName,
parseQueueName,
} from '../src/service-utils';
import type { BatchJobData } from '../src/types';
// Pure-function tests for the service-utils helpers.
describe('Service Utilities', () => {
  describe('normalizeServiceName', () => {
    it('should normalize service names', () => {
      // Table-driven: camelCase/PascalCase collapse to kebab-case.
      const cases: ReadonlyArray<readonly [string, string]> = [
        ['MyService', 'my-service'],
        ['webApi', 'web-api'],
        ['dataIngestion', 'data-ingestion'],
        ['data-pipeline', 'data-pipeline'],
        ['UPPERCASE', 'uppercase'],
      ];
      for (const [input, expected] of cases) {
        expect(normalizeServiceName(input)).toBe(expected);
      }
    });

    it('should handle empty string', () => {
      expect(normalizeServiceName('')).toBe('');
    });

    it('should handle special characters', () => {
      // Only camelCase boundaries are rewritten; symbols pass through untouched.
      expect(normalizeServiceName('my@service#123')).toBe('my@service#123');
      expect(normalizeServiceName('serviceWithCamelCase')).toBe('service-with-camel-case');
    });
  });

  describe('generateCachePrefix', () => {
    it('should generate cache prefix', () => {
      expect(generateCachePrefix('service')).toBe('cache:service');
      expect(generateCachePrefix('webApi')).toBe('cache:web-api');
    });

    it('should handle empty parts', () => {
      expect(generateCachePrefix('')).toBe('cache:');
    });
  });

  describe('getFullQueueName', () => {
    it('should generate full queue name', () => {
      expect(getFullQueueName('service', 'handler')).toBe('{service_handler}');
      expect(getFullQueueName('webApi', 'handler')).toBe('{web-api_handler}');
    });

    it('should normalize service name', () => {
      expect(getFullQueueName('MyService', 'handler')).toBe('{my-service_handler}');
    });
  });

  describe('parseQueueName', () => {
    it('should parse queue name', () => {
      const plain = parseQueueName('{service_handler}');
      expect(plain).toEqual({
        service: 'service',
        handler: 'handler',
      });
      const kebab = parseQueueName('{web-api_data-processor}');
      expect(kebab).toEqual({
        service: 'web-api',
        handler: 'data-processor',
      });
    });

    it('should handle invalid formats', () => {
      for (const bad of ['service:handler', 'service', '']) {
        expect(parseQueueName(bad)).toBeNull();
      }
    });

    it('should handle edge cases', () => {
      for (const malformed of ['{}_handler', '{service_}', 'not-a-valid-format']) {
        expect(parseQueueName(malformed)).toBeNull();
      }
    });
  });
});
describe('ServiceCache', () => {
it('should create service cache', () => {
const mockRedisConfig = {
host: 'localhost',
port: 6379,
};
// Since ServiceCache constructor internally creates a real cache,
// we can't easily test it without mocking the createCache function
// For now, just test that the function exists and returns something
const serviceCache = createServiceCache('myservice', mockRedisConfig);
expect(serviceCache).toBeDefined();
expect(serviceCache).toBeInstanceOf(ServiceCache);
});
it('should handle cache prefix correctly', () => {
const mockRedisConfig = {
host: 'localhost',
port: 6379,
};
const serviceCache = createServiceCache('webApi', mockRedisConfig);
expect(serviceCache).toBeDefined();
// The prefix is set internally as cache:web-api
expect(serviceCache.getKey('test')).toBe('cache:web-api:test');
});
it('should support global cache option', () => {
const mockRedisConfig = {
host: 'localhost',
port: 6379,
};
const globalCache = createServiceCache('myservice', mockRedisConfig, { global: true });
expect(globalCache).toBeDefined();
// Global cache uses a different prefix
expect(globalCache.getKey('test')).toBe('stock-bot:shared:test');
});
});
// Type-level smoke tests for batch job data and result summaries.
describe('Batch Processing', () => {
  it('should handle batch job data types', () => {
    const job: BatchJobData = {
      items: [1, 2, 3],
      options: { batchSize: 10, concurrency: 2 },
    };
    expect(job.items).toHaveLength(3);
    expect(job.options.batchSize).toBe(10);
    expect(job.options.concurrency).toBe(2);
  });

  it('should process batch results', () => {
    const summary = {
      totalItems: 10,
      successful: 8,
      failed: 2,
      errors: [
        { item: 5, error: 'Failed to process' },
        { item: 7, error: 'Invalid data' },
      ],
      duration: 1000,
    };
    // Successes and failures must account for every item, and each failure
    // must carry exactly one error entry.
    expect(summary.successful + summary.failed).toBe(summary.totalItems);
    expect(summary.errors).toHaveLength(summary.failed);
  });
});
// Config-shape test only: validates a rate-limit rules literal; no limiter
// instance is created here.
describe('Rate Limiting', () => {
  it('should validate rate limit config', () => {
    const defaultRule = { name: 'default', maxJobs: 100, window: 60000 };
    const apiRule = { name: 'api', maxJobs: 10, window: 1000 };
    const config = { rules: [defaultRule, apiRule] };

    expect(config.rules).toHaveLength(2);
    expect(config.rules[0].name).toBe('default');
    expect(config.rules[1].maxJobs).toBe(10);
  });
});
// Shape checks for queue job payloads and stats objects — plain literals,
// nothing is enqueued.
describe('Queue Types', () => {
  it('should validate job data structure', () => {
    const job = {
      handler: 'TestHandler',
      operation: 'process',
      payload: { data: 'test' },
    };
    expect(job.handler).toBe('TestHandler');
    expect(job.operation).toBe('process');
    expect(job.payload).toBeDefined();
  });

  it('should validate queue stats structure', () => {
    const stats = {
      waiting: 10,
      active: 2,
      completed: 100,
      failed: 5,
      delayed: 3,
      paused: false,
      workers: 4,
    };
    // All job-count buckets together: 10 + 2 + 100 + 5 + 3 = 120.
    const totalJobs =
      stats.waiting + stats.active + stats.completed + stats.failed + stats.delayed;
    expect(totalJobs).toBe(120);
    expect(stats.paused).toBe(false);
    expect(stats.workers).toBe(4);
  });
});

View file

@ -32,7 +32,7 @@ describe('QueueRateLimiter', () => {
describe('addRule', () => {
it('should add a rate limit rule', () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
const rule: RateLimitRule = {
level: 'queue',
queueName: 'test-queue',
@ -55,7 +55,7 @@ describe('QueueRateLimiter', () => {
it('should add operation-level rule', () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
const rule: RateLimitRule = {
level: 'operation',
queueName: 'test-queue',
@ -86,7 +86,7 @@ describe('QueueRateLimiter', () => {
it('should check against global rule', async () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
const globalRule: RateLimitRule = {
level: 'global',
config: { points: 1000, duration: 60 },
@ -110,7 +110,7 @@ describe('QueueRateLimiter', () => {
it('should prefer more specific rules', async () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
// Add rules from least to most specific
const globalRule: RateLimitRule = {
level: 'global',
@ -161,7 +161,7 @@ describe('QueueRateLimiter', () => {
describe('getStatus', () => {
it('should get rate limit status', async () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
const rule: RateLimitRule = {
level: 'queue',
queueName: 'test-queue',
@ -171,7 +171,7 @@ describe('QueueRateLimiter', () => {
limiter.addRule(rule);
const status = await limiter.getStatus('test-queue', 'handler', 'operation');
expect(status.queueName).toBe('test-queue');
expect(status.handler).toBe('handler');
expect(status.operation).toBe('operation');
@ -182,7 +182,7 @@ describe('QueueRateLimiter', () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
const status = await limiter.getStatus('test-queue', 'handler', 'operation');
expect(status.queueName).toBe('test-queue');
expect(status.appliedRule).toBeUndefined();
expect(status.limit).toBeUndefined();
@ -192,7 +192,7 @@ describe('QueueRateLimiter', () => {
describe('reset', () => {
it('should reset rate limits for specific operation', async () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
const rule: RateLimitRule = {
level: 'operation',
queueName: 'test-queue',
@ -229,7 +229,7 @@ describe('QueueRateLimiter', () => {
describe('removeRule', () => {
it('should remove a rule', () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
const rule: RateLimitRule = {
level: 'queue',
queueName: 'test-queue',
@ -255,7 +255,7 @@ describe('QueueRateLimiter', () => {
describe('getRules', () => {
it('should return all configured rules', () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
const rule1: RateLimitRule = {
level: 'global',
config: { points: 1000, duration: 60 },
@ -280,7 +280,7 @@ describe('QueueRateLimiter', () => {
describe('error handling', () => {
it('should allow on rate limiter error', async () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
// Add a rule but don't set up the actual limiter to cause an error
const rule: RateLimitRule = {
level: 'queue',
@ -294,7 +294,7 @@ describe('QueueRateLimiter', () => {
(limiter as any).limiters.clear();
const result = await limiter.checkLimit('test-queue', 'handler', 'operation');
expect(result.allowed).toBe(true); // Should allow on error
expect(mockLogger.warn).toHaveBeenCalledWith(
'Rate limiter not found for rule',
@ -306,7 +306,7 @@ describe('QueueRateLimiter', () => {
describe('hierarchical rule precedence', () => {
it('should correctly apply rule hierarchy', () => {
const limiter = new QueueRateLimiter(mockRedisClient, mockLogger);
// Add multiple rules at different levels
const rules: RateLimitRule[] = [
{
@ -346,4 +346,4 @@ describe('QueueRateLimiter', () => {
expect(specificRule?.config.points).toBe(10);
});
});
})
});

View file

@ -1,9 +1,9 @@
import { describe, expect, it } from 'bun:test';
import { normalizeServiceName, generateCachePrefix } from '../src/service-utils';
import { generateCachePrefix, normalizeServiceName } from '../src/service-utils';
describe('ServiceCache Integration', () => {
// Since ServiceCache depends on external createCache, we'll test the utility functions it uses
describe('generateCachePrefix usage', () => {
it('should generate correct cache prefix for service', () => {
const prefix = generateCachePrefix('userService');
@ -49,9 +49,9 @@ describe('ServiceCache Integration', () => {
const serviceName = 'UserService';
const normalized = normalizeServiceName(serviceName);
expect(normalized).toBe('user-service');
const prefix = generateCachePrefix(normalized);
expect(prefix).toBe('cache:user-service');
});
});
})
});

View file

@ -1,8 +1,8 @@
import { describe, expect, it } from 'bun:test';
import {
normalizeServiceName,
generateCachePrefix,
getFullQueueName,
normalizeServiceName,
parseQueueName,
} from '../src/service-utils';
@ -95,9 +95,9 @@ describe('Service Utils', () => {
const serviceName = 'userService';
const handlerName = 'processOrder';
const queueName = getFullQueueName(serviceName, handlerName);
expect(queueName).toBe('{user-service_processOrder}');
// Parse it back
const parsed = parseQueueName(queueName);
expect(parsed).toEqual({
@ -109,12 +109,12 @@ describe('Service Utils', () => {
it('should handle cache prefix generation', () => {
const serviceName = 'orderService';
const cachePrefix = generateCachePrefix(serviceName);
expect(cachePrefix).toBe('cache:order-service');
// Use it for cache keys
const cacheKey = `${cachePrefix}:user:123`;
expect(cacheKey).toBe('cache:order-service:user:123');
});
});
})
});

View file

@ -1,6 +1,6 @@
import { describe, expect, it, beforeEach, afterEach } from 'bun:test';
import { getRedisConnection } from '../src/utils';
import { afterEach, beforeEach, describe, expect, it } from 'bun:test';
import type { RedisConfig } from '../src/types';
import { getRedisConnection } from '../src/utils';
describe('Queue Utils', () => {
describe('getRedisConnection', () => {
@ -16,7 +16,7 @@ describe('Queue Utils', () => {
it('should return test connection in test environment', () => {
process.env.NODE_ENV = 'test';
const config: RedisConfig = {
host: 'production.redis.com',
port: 6380,
@ -32,7 +32,7 @@ describe('Queue Utils', () => {
it('should return test connection when BUNIT is set', () => {
process.env.BUNIT = '1';
const config: RedisConfig = {
host: 'production.redis.com',
port: 6380,
@ -47,7 +47,7 @@ describe('Queue Utils', () => {
it('should return actual config in non-test environment', () => {
process.env.NODE_ENV = 'production';
delete process.env.BUNIT;
const config: RedisConfig = {
host: 'production.redis.com',
port: 6380,
@ -72,7 +72,7 @@ describe('Queue Utils', () => {
it('should handle minimal config', () => {
process.env.NODE_ENV = 'development';
const config: RedisConfig = {
host: 'localhost',
port: 6379,
@ -89,7 +89,7 @@ describe('Queue Utils', () => {
it('should preserve all config properties in non-test mode', () => {
delete process.env.NODE_ENV;
delete process.env.BUNIT;
const config: RedisConfig = {
host: 'redis.example.com',
port: 6379,
@ -115,4 +115,4 @@ describe('Queue Utils', () => {
expect(connection.username).toBe('admin'); // Preserved from original
});
});
})
});