// Unit tests for the batch-processor module (processBatchJob / processItems), run under Bun's test runner.
import { beforeEach, describe, expect, it, mock, type Mock } from 'bun:test';
|
|
import type { Logger } from '@stock-bot/logger';
|
|
import { processBatchJob, processItems } from '../src/batch-processor';
|
|
import type { BatchJobData, ProcessOptions, Queue, QueueManager } from '../src/types';
|
|
|
|
describe('Batch Processor', () => {
|
|
type MockLogger = {
|
|
info: Mock<(message: string, meta?: any) => void>;
|
|
error: Mock<(message: string, meta?: any) => void>;
|
|
warn: Mock<(message: string, meta?: any) => void>;
|
|
debug: Mock<(message: string, meta?: any) => void>;
|
|
trace: Mock<(message: string, meta?: any) => void>;
|
|
};
|
|
|
|
type MockQueue = {
|
|
add: Mock<(name: string, data: any, options?: any) => Promise<{ id: string }>>;
|
|
addBulk: Mock<
|
|
(jobs: Array<{ name: string; data: any; opts?: any }>) => Promise<Array<{ id: string }>>
|
|
>;
|
|
createChildLogger: Mock<(component: string, meta?: any) => MockLogger>;
|
|
getName: Mock<() => string>;
|
|
};
|
|
|
|
type MockQueueManager = {
|
|
getQueue: Mock<(name: string) => MockQueue>;
|
|
getCache: Mock<
|
|
(name: string) => {
|
|
get: Mock<(key: string) => Promise<any>>;
|
|
set: Mock<(key: string, value: any, ttl?: number) => Promise<void>>;
|
|
del: Mock<(key: string) => Promise<void>>;
|
|
}
|
|
>;
|
|
};
|
|
|
|
let mockLogger: MockLogger;
|
|
let mockQueue: MockQueue;
|
|
let mockQueueManager: MockQueueManager;
|
|
let mockCache: {
|
|
get: Mock<(key: string) => Promise<any>>;
|
|
set: Mock<(key: string, value: any, ttl?: number) => Promise<void>>;
|
|
del: Mock<(key: string) => Promise<void>>;
|
|
};
|
|
|
|
beforeEach(() => {
|
|
mockLogger = {
|
|
info: mock(() => {}),
|
|
error: mock(() => {}),
|
|
warn: mock(() => {}),
|
|
debug: mock(() => {}),
|
|
trace: mock(() => {}),
|
|
};
|
|
|
|
mockQueue = {
|
|
add: mock(async () => ({ id: 'job-123' })),
|
|
addBulk: mock(async jobs => jobs.map((_, i) => ({ id: `job-${i + 1}` }))),
|
|
createChildLogger: mock(() => mockLogger),
|
|
getName: mock(() => 'test-queue'),
|
|
};
|
|
|
|
mockCache = {
|
|
get: mock(async () => null),
|
|
set: mock(async () => {}),
|
|
del: mock(async () => {}),
|
|
};
|
|
|
|
mockQueueManager = {
|
|
getQueue: mock(() => mockQueue),
|
|
getCache: mock(() => mockCache),
|
|
};
|
|
});
|
|
|
|
describe('processBatchJob', () => {
|
|
it('should process all items successfully', async () => {
|
|
const batchData: BatchJobData = {
|
|
payloadKey: 'test-payload-key',
|
|
batchIndex: 0,
|
|
totalBatches: 1,
|
|
itemCount: 3,
|
|
totalDelayHours: 0,
|
|
};
|
|
|
|
// Mock the cached payload
|
|
const cachedPayload = {
|
|
items: ['item1', 'item2', 'item3'],
|
|
options: {
|
|
batchSize: 2,
|
|
concurrency: 1,
|
|
},
|
|
};
|
|
mockCache.get.mockImplementation(async () => cachedPayload);
|
|
|
|
const result = await processBatchJob(
|
|
batchData,
|
|
'test-queue',
|
|
mockQueueManager as unknown as QueueManager
|
|
);
|
|
|
|
expect(mockCache.get).toHaveBeenCalledWith('test-payload-key');
|
|
expect(mockQueue.addBulk).toHaveBeenCalled();
|
|
expect(result).toBeDefined();
|
|
});
|
|
|
|
it('should handle partial failures', async () => {
|
|
const batchData: BatchJobData = {
|
|
payloadKey: 'test-payload-key',
|
|
batchIndex: 0,
|
|
totalBatches: 1,
|
|
itemCount: 3,
|
|
totalDelayHours: 0,
|
|
};
|
|
|
|
// Mock the cached payload
|
|
const cachedPayload = {
|
|
items: ['item1', 'item2', 'item3'],
|
|
options: {},
|
|
};
|
|
mockCache.get.mockImplementation(async () => cachedPayload);
|
|
|
|
// Make addBulk throw an error to simulate failure
|
|
mockQueue.addBulk.mockImplementation(async () => {
|
|
throw new Error('Failed to add jobs');
|
|
});
|
|
|
|
// processBatchJob should still complete even if addBulk fails
|
|
const result = await processBatchJob(
|
|
batchData,
|
|
'test-queue',
|
|
mockQueueManager as unknown as QueueManager
|
|
);
|
|
|
|
expect(mockQueue.addBulk).toHaveBeenCalled();
|
|
// The error is logged in addJobsInChunks, not in processBatchJob
|
|
expect(mockLogger.error).toHaveBeenCalledWith('Failed to add job chunk', expect.any(Object));
|
|
});
|
|
|
|
it('should handle empty items', async () => {
|
|
const batchData: BatchJobData = {
|
|
payloadKey: 'test-payload-key',
|
|
batchIndex: 0,
|
|
totalBatches: 1,
|
|
itemCount: 0,
|
|
totalDelayHours: 0,
|
|
};
|
|
|
|
// Mock the cached payload with empty items
|
|
const cachedPayload = {
|
|
items: [],
|
|
options: {},
|
|
};
|
|
mockCache.get.mockImplementation(async () => cachedPayload);
|
|
|
|
const result = await processBatchJob(
|
|
batchData,
|
|
'test-queue',
|
|
mockQueueManager as unknown as QueueManager
|
|
);
|
|
|
|
expect(mockQueue.addBulk).not.toHaveBeenCalled();
|
|
expect(result).toBeDefined();
|
|
});
|
|
|
|
it('should track duration', async () => {
|
|
const batchData: BatchJobData = {
|
|
payloadKey: 'test-payload-key',
|
|
batchIndex: 0,
|
|
totalBatches: 1,
|
|
itemCount: 1,
|
|
totalDelayHours: 0,
|
|
};
|
|
|
|
// Mock the cached payload
|
|
const cachedPayload = {
|
|
items: ['item1'],
|
|
options: {},
|
|
};
|
|
mockCache.get.mockImplementation(async () => cachedPayload);
|
|
|
|
// Add delay to queue.add
|
|
mockQueue.add.mockImplementation(
|
|
() => new Promise(resolve => setTimeout(() => resolve({ id: 'job-1' }), 10))
|
|
);
|
|
|
|
const result = await processBatchJob(
|
|
batchData,
|
|
'test-queue',
|
|
mockQueueManager as unknown as QueueManager
|
|
);
|
|
|
|
expect(result).toBeDefined();
|
|
// The function doesn't return duration in its result
|
|
});
|
|
});
|
|
|
|
describe('processItems', () => {
|
|
it('should process items with default options', async () => {
|
|
const items = [1, 2, 3, 4, 5];
|
|
const options: ProcessOptions = { totalDelayHours: 0 };
|
|
|
|
const result = await processItems(
|
|
items,
|
|
'test-queue',
|
|
options,
|
|
mockQueueManager as unknown as QueueManager
|
|
);
|
|
|
|
expect(result.totalItems).toBe(5);
|
|
expect(result.jobsCreated).toBe(5);
|
|
expect(result.mode).toBe('direct');
|
|
expect(mockQueue.addBulk).toHaveBeenCalled();
|
|
});
|
|
|
|
it('should process items in batches', async () => {
|
|
const items = [1, 2, 3, 4, 5];
|
|
const options: ProcessOptions = {
|
|
totalDelayHours: 0,
|
|
useBatching: true,
|
|
batchSize: 2,
|
|
};
|
|
|
|
const result = await processItems(
|
|
items,
|
|
'test-queue',
|
|
options,
|
|
mockQueueManager as unknown as QueueManager
|
|
);
|
|
|
|
expect(result.totalItems).toBe(5);
|
|
expect(result.mode).toBe('batch');
|
|
// When batching is enabled, it creates batch jobs instead of individual jobs
|
|
expect(mockQueue.addBulk).toHaveBeenCalled();
|
|
});
|
|
|
|
it('should handle concurrent processing', async () => {
|
|
const items = [1, 2, 3, 4];
|
|
const options: ProcessOptions = {
|
|
totalDelayHours: 0,
|
|
};
|
|
|
|
const result = await processItems(
|
|
items,
|
|
'test-queue',
|
|
options,
|
|
mockQueueManager as unknown as QueueManager
|
|
);
|
|
|
|
expect(result.totalItems).toBe(4);
|
|
expect(result.jobsCreated).toBe(4);
|
|
expect(mockQueue.addBulk).toHaveBeenCalled();
|
|
});
|
|
|
|
it('should handle empty array', async () => {
|
|
const items: number[] = [];
|
|
const options: ProcessOptions = { totalDelayHours: 0 };
|
|
|
|
const result = await processItems(
|
|
items,
|
|
'test-queue',
|
|
options,
|
|
mockQueueManager as unknown as QueueManager
|
|
);
|
|
|
|
expect(result.totalItems).toBe(0);
|
|
expect(result.jobsCreated).toBe(0);
|
|
expect(result.mode).toBe('direct');
|
|
expect(mockQueue.addBulk).not.toHaveBeenCalled();
|
|
});
|
|
|
|
it('should propagate errors', async () => {
|
|
const items = [1, 2, 3];
|
|
const options: ProcessOptions = { totalDelayHours: 0 };
|
|
|
|
// Make queue.addBulk throw an error
|
|
mockQueue.addBulk.mockImplementation(async () => {
|
|
throw new Error('Process error');
|
|
});
|
|
|
|
// processItems catches errors and continues, so it won't reject
|
|
const result = await processItems(
|
|
items,
|
|
'test-queue',
|
|
options,
|
|
mockQueueManager as unknown as QueueManager
|
|
);
|
|
|
|
expect(result.jobsCreated).toBe(0);
|
|
expect(mockQueue.addBulk).toHaveBeenCalled();
|
|
expect(mockLogger.error).toHaveBeenCalledWith('Failed to add job chunk', expect.any(Object));
|
|
});
|
|
|
|
it('should process large batches efficiently', async () => {
|
|
const items = Array.from({ length: 100 }, (_, i) => i);
|
|
const options: ProcessOptions = {
|
|
totalDelayHours: 0,
|
|
useBatching: true,
|
|
batchSize: 20,
|
|
};
|
|
|
|
const result = await processItems(
|
|
items,
|
|
'test-queue',
|
|
options,
|
|
mockQueueManager as unknown as QueueManager
|
|
);
|
|
|
|
expect(result.totalItems).toBe(100);
|
|
expect(result.mode).toBe('batch');
|
|
// With batching enabled and batch size 20, we should have 5 batch jobs
|
|
expect(mockQueue.addBulk).toHaveBeenCalled();
|
|
});
|
|
});
|
|
});
|