queue work

This commit is contained in:
Boki 2025-06-19 08:22:00 -04:00
parent c05a7413dc
commit d3ef73ae00
9 changed files with 938 additions and 1086 deletions

View file

@ -1,48 +1,20 @@
import { CacheProvider, createCache } from '@stock-bot/cache';
import { getLogger } from '@stock-bot/logger';
import type { Queue } from './queue-instance';
import { QueueManager } from './queue-manager';
import type { BatchJobData, BatchResult, JobData, ProcessOptions } from './types';
const logger = getLogger('batch-processor');
const cacheProviders = new Map<string, CacheProvider>();
/**
 * Lazily create (and memoize) the CacheProvider for a queue.
 *
 * One provider is kept per queue name in the module-level `cacheProviders`
 * map, so repeated calls for the same queue reuse the same Redis-backed
 * cache instead of opening a new connection.
 *
 * @param queueName - logical queue name; also namespaces keys via `batch:<name>:`
 * @param redisConfig - connection config forwarded to `createCache`.
 *   NOTE(review): typed `any` because the expected shape is owned by
 *   `@stock-bot/cache` — consider importing its concrete config type.
 * @returns the shared CacheProvider for this queue
 */
function getCache(queueName: string, redisConfig: any): CacheProvider {
  // Single get() + guard instead of has()/get(): avoids a second map lookup
  // and removes the unsafe `as CacheProvider` cast the old pattern needed.
  const existing = cacheProviders.get(queueName);
  if (existing) {
    return existing;
  }
  const cacheProvider = createCache({
    redisConfig,
    keyPrefix: `batch:${queueName}:`,
    ttl: 86400, // 24 hours default
    enableMetrics: true,
  });
  cacheProviders.set(queueName, cacheProvider);
  return cacheProvider;
}
/**
 * Initialize the batch cache before any batch operations.
 * This should be called during application startup; it blocks until the
 * Redis-backed cache reports ready (10s ceiling) or rejects on timeout.
 */
export async function initializeBatchCache(queue: Queue): Promise<void> {
  const name = queue.getName();
  const config = queue.getRedisConfig();
  logger.info('Initializing batch cache...', { queueName: name });
  await getCache(name, config).waitForReady(10000);
  logger.info('Batch cache initialized successfully', { queueName: name });
}
/**
* Main function - processes items either directly or in batches
* Each item becomes payload: item (no processing needed)
*/
export async function processItems<T>(
items: T[],
queue: Queue,
queueName: string,
options: ProcessOptions
): Promise<BatchResult> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue(queueName);
const startTime = Date.now();
if (items.length === 0) {
@ -63,8 +35,8 @@ export async function processItems<T>(
try {
const result = options.useBatching
? await processBatched(items, queue, options)
: await processDirect(items, queue, options);
? await processBatched(items, queueName, options)
: await processDirect(items, queueName, options);
const duration = Date.now() - startTime;
@ -85,9 +57,11 @@ export async function processItems<T>(
*/
async function processDirect<T>(
items: T[],
queue: Queue,
queueName: string,
options: ProcessOptions
): Promise<Omit<BatchResult, 'duration'>> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue(queueName);
const totalDelayMs = options.totalDelayHours * 60 * 60 * 1000; // Convert hours to milliseconds
const delayPerItem = totalDelayMs / items.length;
@ -114,7 +88,7 @@ async function processDirect<T>(
},
}));
const createdJobs = await addJobsInChunks(queue, jobs);
const createdJobs = await addJobsInChunks(queueName, jobs);
return {
@ -129,9 +103,11 @@ async function processDirect<T>(
*/
async function processBatched<T>(
items: T[],
queue: Queue,
queueName: string,
options: ProcessOptions
): Promise<Omit<BatchResult, 'duration'>> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue(queueName);
const batchSize = options.batchSize || 100;
const batches = createBatches(items, batchSize);
const totalDelayMs = options.totalDelayHours * 60 * 60 * 1000; // Convert hours to milliseconds
@ -147,7 +123,7 @@ async function processBatched<T>(
const batchJobs = await Promise.all(
batches.map(async (batch, batchIndex) => {
// Just store the items directly - no processing needed
const payloadKey = await storeItems(batch, queue, options);
const payloadKey = await storeItems(batch, queueName, options);
return {
name: 'process-batch',
@ -174,7 +150,7 @@ async function processBatched<T>(
})
);
const createdJobs = await addJobsInChunks(queue, batchJobs);
const createdJobs = await addJobsInChunks(queueName, batchJobs);
return {
totalItems: items.length,
@ -189,8 +165,10 @@ async function processBatched<T>(
*/
export async function processBatchJob(
jobData: BatchJobData,
queue: Queue
queueName: string
): Promise<unknown> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue(queueName);
const { payloadKey, batchIndex, totalBatches, itemCount } = jobData;
logger.debug('Processing batch job', {
@ -200,7 +178,7 @@ export async function processBatchJob(
});
try {
const payload = await loadPayload(payloadKey, queue);
const payload = await loadPayload(payloadKey, queueName);
if (!payload || !payload.items || !payload.options) {
logger.error('Invalid payload data', { payloadKey, payload });
throw new Error(`Invalid payload data for key: ${payloadKey}`);
@ -225,10 +203,10 @@ export async function processBatchJob(
},
}));
const createdJobs = await addJobsInChunks(queue, jobs);
const createdJobs = await addJobsInChunks(queueName, jobs);
// Cleanup payload after successful processing
await cleanupPayload(payloadKey, queue);
await cleanupPayload(payloadKey, queueName);
return {
batchIndex,
@ -253,14 +231,11 @@ function createBatches<T>(items: T[], batchSize: number): T[][] {
async function storeItems<T>(
items: T[],
queue: Queue,
queueName: string,
options: ProcessOptions
): Promise<string> {
if (!queue) {
throw new Error('Batch cache not initialized. Call initializeBatchCache() first.');
}
const cache = getCache(queue.getName(), queue.getRedisConfig());
const queueManager = QueueManager.getInstance();
const cache = queueManager.getCache(queueName);
const payloadKey = `payload:${Date.now()}:${Math.random().toString(36).substr(2, 9)}`;
const payload = {
@ -283,7 +258,7 @@ async function storeItems<T>(
async function loadPayload<T>(
key: string,
queue: Queue
queueName: string
): Promise<{
items: T[];
options: {
@ -294,11 +269,8 @@ async function loadPayload<T>(
operation: string;
};
} | null> {
if (!queue) {
throw new Error('Batch cache not initialized. Call initializeBatchCache() first.');
}
const cache = getCache(queue.getName(), queue.getRedisConfig());
const queueManager = QueueManager.getInstance();
const cache = queueManager.getCache(queueName);
return (await cache.get(key)) as {
items: T[];
options: {
@ -311,20 +283,19 @@ async function loadPayload<T>(
} | null;
}
async function cleanupPayload(key: string, queue: Queue): Promise<void> {
if (!queue) {
throw new Error('Batch cache not initialized. Call initializeBatchCache() first.');
}
const cache = getCache(queue.getName(), queue.getRedisConfig());
async function cleanupPayload(key: string, queueName: string): Promise<void> {
const queueManager = QueueManager.getInstance();
const cache = queueManager.getCache(queueName);
await cache.del(key);
}
async function addJobsInChunks(
queue: Queue,
queueName: string,
jobs: Array<{ name: string; data: JobData; opts?: Record<string, unknown> }>,
chunkSize = 100
): Promise<unknown[]> {
const queueManager = QueueManager.getInstance();
const queue = queueManager.getQueue(queueName);
const allCreatedJobs = [];
for (let i = 0; i < jobs.length; i += chunkSize) {