separating batch payload from job queue
This commit is contained in:
parent
c57733ebca
commit
b974753d8b
5 changed files with 360 additions and 25 deletions
33
libs/cache/src/providers/hybrid-cache.ts
vendored
33
libs/cache/src/providers/hybrid-cache.ts
vendored
|
|
@ -258,4 +258,37 @@ export class HybridCache implements CacheProvider {
|
|||
await this.redisCache.disconnect();
|
||||
this.logger.info('Hybrid cache disconnected');
|
||||
}
|
||||
|
||||
async waitForReady(timeout: number = 5000): Promise<void> {
|
||||
// Memory cache is always ready, only need to wait for Redis
|
||||
await this.redisCache.waitForReady(timeout);
|
||||
}
|
||||
|
||||
/**
 * Report whether the hybrid cache is usable.
 * Both layers are consulted (L1 first, short-circuiting), even though
 * the memory layer is expected to always report ready.
 */
isReady(): boolean {
  if (!this.memoryCache.isReady()) {
    return false;
  }
  return this.redisCache.isReady();
}
|
||||
|
||||
/**
|
||||
* Manually trigger a refresh of the Redis cache for a specific key
|
||||
* Useful for updating the cache after a data change
|
||||
*/
|
||||
async refresh(key: string): Promise<void> {
|
||||
try {
|
||||
// Get the current value from memory (L1)
|
||||
const currentValue = await this.memoryCache.get(key);
|
||||
if (currentValue !== null) {
|
||||
// If exists in memory, update Redis (L2)
|
||||
await this.redisCache.set(key, currentValue);
|
||||
this.logger.info('Cache refresh (L2)', { key });
|
||||
} else {
|
||||
this.logger.debug('Cache refresh skipped, key not found in L1', { key });
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.error('Cache refresh error', {
|
||||
key,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
21
libs/cache/src/providers/memory-cache.ts
vendored
21
libs/cache/src/providers/memory-cache.ts
vendored
|
|
@ -256,4 +256,25 @@ export class MemoryCache implements CacheProvider {
|
|||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
/**
 * Wait for the cache to become ready.
 * The in-memory store has no connection to establish, so this resolves
 * immediately; `timeout` exists only to satisfy the CacheProvider
 * contract and is intentionally unused.
 *
 * @param timeout Unused — the memory cache is always ready.
 */
async waitForReady(timeout: number = 5000): Promise<void> {
  // An async function with an empty body already resolves with
  // undefined; wrapping the result in Promise.resolve() is redundant.
}
|
||||
|
||||
/**
 * The memory-backed store is available from construction onward,
 * so readiness is unconditional.
 */
isReady(): boolean {
  // No connection lifecycle to check.
  const alwaysReady = true;
  return alwaysReady;
}
|
||||
|
||||
/**
 * Rough estimate of the store's memory footprint in bytes.
 * Keys and JSON-serialized values are counted as UTF-16 code units
 * (2 bytes each) plus a flat per-entry overhead. Estimate only.
 *
 * @returns Approximate size of the store in bytes.
 */
private getMemoryUsage(): number {
  let bytes = 0;
  for (const [key, entry] of this.store.entries()) {
    bytes += key.length * 2; // UTF-16 code units
    // JSON.stringify can return undefined (value === undefined,
    // functions, symbols) and throws on circular structures; the
    // original called .length on it unguarded, which would crash a
    // stats call. Treat both cases as zero-size instead.
    let serialized: string | undefined;
    try {
      serialized = JSON.stringify(entry.value);
    } catch {
      serialized = undefined;
    }
    bytes += (serialized?.length ?? 0) * 2;
    bytes += 24; // rough per-entry object overhead
  }
  return bytes;
}
|
||||
}
|
||||
|
|
|
|||
38
libs/cache/src/providers/redis-cache.ts
vendored
38
libs/cache/src/providers/redis-cache.ts
vendored
|
|
@ -253,6 +253,44 @@ export class RedisCache implements CacheProvider {
|
|||
return this.get<number[]>(key);
|
||||
}
|
||||
|
||||
async waitForReady(timeout: number = 5000): Promise<void> {
|
||||
if (this.isConnected) {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const timeoutId = setTimeout(() => {
|
||||
reject(new Error(`Redis cache connection timeout after ${timeout}ms`));
|
||||
}, timeout);
|
||||
|
||||
const onReady = () => {
|
||||
clearTimeout(timeoutId);
|
||||
this.redis.off('ready', onReady);
|
||||
this.redis.off('error', onError);
|
||||
resolve();
|
||||
};
|
||||
|
||||
const onError = (error: Error) => {
|
||||
clearTimeout(timeoutId);
|
||||
this.redis.off('ready', onReady);
|
||||
this.redis.off('error', onError);
|
||||
reject(new Error(`Redis cache connection failed: ${error.message}`));
|
||||
};
|
||||
|
||||
if (this.redis.status === 'ready') {
|
||||
clearTimeout(timeoutId);
|
||||
resolve();
|
||||
} else {
|
||||
this.redis.once('ready', onReady);
|
||||
this.redis.once('error', onError);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * True only when the client is flagged connected AND the underlying
 * Redis connection currently reports the 'ready' status.
 */
isReady(): boolean {
  if (!this.isConnected) {
    return false;
  }
  return this.redis.status === 'ready';
}
|
||||
|
||||
/**
|
||||
* Close the Redis connection
|
||||
*/
|
||||
|
|
|
|||
12
libs/cache/src/types.ts
vendored
12
libs/cache/src/types.ts
vendored
|
|
@ -6,6 +6,18 @@ export interface CacheProvider {
|
|||
clear(): Promise<void>;
|
||||
getStats(): CacheStats;
|
||||
health(): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Wait for the cache to be ready and connected
|
||||
* @param timeout Maximum time to wait in milliseconds (default: 5000)
|
||||
* @returns Promise that resolves when cache is ready
|
||||
*/
|
||||
waitForReady(timeout?: number): Promise<void>;
|
||||
|
||||
/**
|
||||
* Check if the cache is currently ready
|
||||
*/
|
||||
isReady(): boolean;
|
||||
}
|
||||
|
||||
export interface CacheOptions {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue