still trying

This commit is contained in:
Boki 2025-06-10 22:16:11 -04:00
parent 682b50d3b2
commit 716c90060a
4 changed files with 110 additions and 130 deletions

View file

@ -30,6 +30,8 @@ export interface BatchResult {
// Cache instance for payload storage
// Lazily created by getCache(); null until first use.
let cacheProvider: CacheProvider | null = null;
// True once the provider has signalled readiness (see ensureCacheReady).
let cacheInitialized = false;
// In-flight initialization, shared so concurrent callers await the same attempt.
let cacheInitPromise: Promise<void> | null = null;
function getCache(): CacheProvider {
if (!cacheProvider) {
@ -42,6 +44,29 @@ function getCache(): CacheProvider {
return cacheProvider;
}
/**
 * Waits for the shared cache provider to finish initializing.
 *
 * Concurrent callers share a single in-flight initialization promise so the
 * provider's waitForReady() is only awaited once at a time. On timeout the
 * error is logged and swallowed (operations proceed with a possible fallback),
 * but the in-flight promise is cleared so the NEXT caller retries
 * initialization instead of reusing a promise that resolved without a ready
 * cache.
 *
 * @returns resolves when the cache is ready, or after a logged timeout.
 */
async function ensureCacheReady(): Promise<void> {
  if (cacheInitialized) {
    return;
  }
  if (cacheInitPromise) {
    // Another caller already started initialization — piggyback on it.
    return cacheInitPromise;
  }
  cacheInitPromise = (async () => {
    const cache = getCache();
    try {
      await cache.waitForReady(10000);
      cacheInitialized = true;
    } catch (error) {
      logger.warn('Cache initialization timeout, proceeding anyway', { error });
      // Don't throw - let operations continue with potential fallback.
      // Clear the shared promise so a later call can retry initialization;
      // otherwise every future caller would reuse this failed attempt.
      cacheInitPromise = null;
    }
  })();
  return cacheInitPromise;
}
/**
* Main function - processes items either directly or in batches
*/
@ -163,9 +188,9 @@ async function processBatched<T>(
name: 'process-batch',
data: {
type: 'process-batch',
service: 'batch-processor',
provider: 'batch',
operation: 'process-batch-items',
service: options.service || 'generic',
provider: options.provider || 'generic',
operation: options.operation || 'generic',
payload: {
payloadKey,
batchIndex,
@ -222,9 +247,9 @@ export async function processBatchJob(jobData: any, queue: QueueService): Promis
name: 'process-item',
data: {
type: 'process-item',
service: options.service || 'data-service',
service: options.service || 'generic',
provider: options.provider || 'generic',
operation: options.operation || 'process-item',
operation: options.operation || 'generic',
payload: processor(item, index),
priority: options.priority || 1
},
@ -267,11 +292,10 @@ async function storePayload<T>(
processor: (item: T, index: number) => any,
options: ProcessOptions
): Promise<string> {
// Ensure cache is ready using shared initialization
await ensureCacheReady();
const cache = getCache();
// Wait for cache to be ready before storing
await cache.waitForReady(5000);
const key = `payload_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
const payload = {
@ -282,9 +306,9 @@ async function storePayload<T>(
priority: options.priority || 1,
retries: options.retries || 3,
// Store routing information for later use
service: options.service || 'data-service',
service: options.service || 'generic',
provider: options.provider || 'generic',
operation: options.operation || 'process-item'
operation: options.operation || 'generic'
},
createdAt: Date.now()
};
@ -306,10 +330,10 @@ async function storePayload<T>(
}
async function loadPayload(key: string): Promise<any> {
const cache = getCache();
// Ensure cache is ready using shared initialization
await ensureCacheReady();
// Wait for cache to be ready before loading
await cache.waitForReady(5000);
const cache = getCache();
logger.debug('Loading batch payload', {
key,
@ -365,70 +389,4 @@ async function addJobsInChunks(queue: QueueService, jobs: any[], chunkSize = 100
return allCreatedJobs;
}
// Convenience functions for common use cases
/**
 * Convenience wrapper: queues a list of symbols for batch processing.
 *
 * Each symbol is mapped to a `{ symbol, index, source }` payload and handed
 * to processItems() with the caller's routing options.
 *
 * @param symbols      symbols to process
 * @param queue        target queue service
 * @param options      routing + batching options; batchSize defaults to 100,
 *                     priority to 1, useBatching to false
 * @returns the BatchResult produced by processItems()
 */
export async function processSymbols(
  symbols: string[],
  queue: QueueService,
  options: {
    operation: string;
    service: string;
    provider: string;
    totalDelayMs: number;
    useBatching?: boolean;
    batchSize?: number;
    priority?: number;
  }
): Promise<BatchResult> {
  return processItems(
    symbols,
    (symbol, index) => ({
      symbol,
      index,
      source: 'batch-processing'
    }),
    queue,
    {
      totalDelayMs: options.totalDelayMs,
      // ?? instead of ||: only substitute defaults when the option is
      // actually absent, so explicit falsy values (e.g. priority 0) survive.
      batchSize: options.batchSize ?? 100,
      priority: options.priority ?? 1,
      useBatching: options.useBatching ?? false,
      service: options.service,
      provider: options.provider,
      operation: options.operation
    }
  );
}
/**
 * Convenience wrapper: queues a list of proxies for batch processing.
 *
 * Each proxy is mapped to a `{ proxy, index, source }` payload and handed
 * to processItems() with proxy-oriented defaults (batchSize 200, priority 2,
 * batching enabled, proxy-service routing).
 *
 * @param proxies      proxies to process
 * @param queue        target queue service
 * @param options      overrides for delay, batching, and routing
 * @returns the BatchResult produced by processItems()
 */
export async function processProxies(
  proxies: any[],
  queue: QueueService,
  options: {
    totalDelayMs: number;
    useBatching?: boolean;
    batchSize?: number;
    priority?: number;
    service?: string;
    provider?: string;
    operation?: string;
  }
): Promise<BatchResult> {
  return processItems(
    proxies,
    (proxy, index) => ({
      proxy,
      index,
      source: 'batch-processing'
    }),
    queue,
    {
      totalDelayMs: options.totalDelayMs,
      batchSize: options.batchSize ?? 200,
      priority: options.priority ?? 2,
      // BUG FIX: `options.useBatching || true` was always true, making it
      // impossible to disable batching. `??` applies the default only when
      // the caller omits the option, so an explicit `false` is honored.
      useBatching: options.useBatching ?? true,
      service: options.service ?? 'data-service',
      provider: options.provider ?? 'proxy-service',
      operation: options.operation ?? 'check-proxy'
    }
  );
}