cleaned up cache lib
This commit is contained in:
parent
ee57b66391
commit
e98b1d8ae2
9 changed files with 637 additions and 1209 deletions
194
libs/cache/src/connection-manager.ts
vendored
Normal file
194
libs/cache/src/connection-manager.ts
vendored
Normal file
|
|
@ -0,0 +1,194 @@
|
|||
import Redis from 'ioredis';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { dragonflyConfig } from '@stock-bot/config';
|
||||
|
||||
interface ConnectionConfig {
|
||||
name: string;
|
||||
singleton?: boolean;
|
||||
db?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Redis Connection Manager for managing shared and unique connections
|
||||
*/
|
||||
export class RedisConnectionManager {
|
||||
private connections = new Map<string, Redis>();
|
||||
private static sharedConnections = new Map<string, Redis>();
|
||||
private static instance: RedisConnectionManager;
|
||||
private logger = getLogger('redis-connection-manager');
|
||||
|
||||
// Singleton pattern for the manager itself
|
||||
static getInstance(): RedisConnectionManager {
|
||||
if (!this.instance) {
|
||||
this.instance = new RedisConnectionManager();
|
||||
}
|
||||
return this.instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get or create a Redis connection
|
||||
* @param config Connection configuration
|
||||
* @returns Redis connection instance
|
||||
*/
|
||||
getConnection(config: ConnectionConfig): Redis {
|
||||
const { name, singleton = false, db } = config;
|
||||
|
||||
if (singleton) {
|
||||
// Use shared connection across all instances
|
||||
if (!RedisConnectionManager.sharedConnections.has(name)) {
|
||||
const connection = this.createConnection(name, db);
|
||||
RedisConnectionManager.sharedConnections.set(name, connection);
|
||||
this.logger.info(`Created shared Redis connection: ${name}`);
|
||||
}
|
||||
return RedisConnectionManager.sharedConnections.get(name)!;
|
||||
} else {
|
||||
// Create unique connection per instance
|
||||
const uniqueName = `${name}-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
|
||||
const connection = this.createConnection(uniqueName, db);
|
||||
this.connections.set(uniqueName, connection);
|
||||
this.logger.info(`Created unique Redis connection: ${uniqueName}`);
|
||||
return connection;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new Redis connection with configuration
|
||||
*/
|
||||
private createConnection(name: string, db?: number): Redis {
|
||||
const redisConfig = {
|
||||
host: dragonflyConfig.DRAGONFLY_HOST,
|
||||
port: dragonflyConfig.DRAGONFLY_PORT,
|
||||
password: dragonflyConfig.DRAGONFLY_PASSWORD || undefined,
|
||||
username: dragonflyConfig.DRAGONFLY_USERNAME || undefined,
|
||||
db: db ?? dragonflyConfig.DRAGONFLY_DATABASE,
|
||||
maxRetriesPerRequest: dragonflyConfig.DRAGONFLY_MAX_RETRIES,
|
||||
retryDelayOnFailover: dragonflyConfig.DRAGONFLY_RETRY_DELAY,
|
||||
connectTimeout: dragonflyConfig.DRAGONFLY_CONNECT_TIMEOUT,
|
||||
commandTimeout: dragonflyConfig.DRAGONFLY_COMMAND_TIMEOUT,
|
||||
keepAlive: dragonflyConfig.DRAGONFLY_ENABLE_KEEPALIVE ? dragonflyConfig.DRAGONFLY_KEEPALIVE_INTERVAL * 1000 : 0,
|
||||
connectionName: name,
|
||||
lazyConnect: true,
|
||||
...(dragonflyConfig.DRAGONFLY_TLS && {
|
||||
tls: {
|
||||
cert: dragonflyConfig.DRAGONFLY_TLS_CERT_FILE || undefined,
|
||||
key: dragonflyConfig.DRAGONFLY_TLS_KEY_FILE || undefined,
|
||||
ca: dragonflyConfig.DRAGONFLY_TLS_CA_FILE || undefined,
|
||||
rejectUnauthorized: !dragonflyConfig.DRAGONFLY_TLS_SKIP_VERIFY,
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
const redis = new Redis(redisConfig);
|
||||
|
||||
// Setup event handlers
|
||||
redis.on('connect', () => {
|
||||
this.logger.info(`Redis connection established: ${name}`);
|
||||
});
|
||||
|
||||
redis.on('ready', () => {
|
||||
this.logger.info(`Redis connection ready: ${name}`);
|
||||
});
|
||||
|
||||
redis.on('error', (err) => {
|
||||
this.logger.error(`Redis connection error for ${name}:`, err);
|
||||
});
|
||||
|
||||
redis.on('close', () => {
|
||||
this.logger.info(`Redis connection closed: ${name}`);
|
||||
});
|
||||
|
||||
redis.on('reconnecting', () => {
|
||||
this.logger.info(`Redis reconnecting: ${name}`);
|
||||
});
|
||||
|
||||
return redis;
|
||||
}
|
||||
|
||||
/**
|
||||
* Close a specific connection
|
||||
*/
|
||||
async closeConnection(connection: Redis): Promise<void> {
|
||||
try {
|
||||
await connection.quit();
|
||||
} catch (error) {
|
||||
this.logger.error('Error closing Redis connection:', error);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Close all connections managed by this instance
|
||||
*/
|
||||
async closeAllConnections(): Promise<void> {
|
||||
// Close instance-specific connections
|
||||
const instancePromises = Array.from(this.connections.values()).map(conn =>
|
||||
this.closeConnection(conn)
|
||||
);
|
||||
await Promise.all(instancePromises);
|
||||
this.connections.clear();
|
||||
|
||||
// Close shared connections (only if this is the last instance)
|
||||
if (RedisConnectionManager.instance === this) {
|
||||
const sharedPromises = Array.from(RedisConnectionManager.sharedConnections.values()).map(conn =>
|
||||
this.closeConnection(conn)
|
||||
);
|
||||
await Promise.all(sharedPromises);
|
||||
RedisConnectionManager.sharedConnections.clear();
|
||||
}
|
||||
|
||||
this.logger.info('All Redis connections closed');
|
||||
}
|
||||
|
||||
/**
|
||||
* Get connection statistics
|
||||
*/
|
||||
getConnectionCount(): { shared: number; unique: number } {
|
||||
return {
|
||||
shared: RedisConnectionManager.sharedConnections.size,
|
||||
unique: this.connections.size
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all connection names for monitoring
|
||||
*/
|
||||
getConnectionNames(): { shared: string[]; unique: string[] } {
|
||||
return {
|
||||
shared: Array.from(RedisConnectionManager.sharedConnections.keys()),
|
||||
unique: Array.from(this.connections.keys())
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Health check for all connections
|
||||
*/
|
||||
async healthCheck(): Promise<{ healthy: boolean; details: Record<string, boolean> }> {
|
||||
const details: Record<string, boolean> = {};
|
||||
let allHealthy = true;
|
||||
|
||||
// Check shared connections
|
||||
for (const [name, connection] of RedisConnectionManager.sharedConnections) {
|
||||
try {
|
||||
await connection.ping();
|
||||
details[`shared:${name}`] = true;
|
||||
} catch (error) {
|
||||
details[`shared:${name}`] = false;
|
||||
allHealthy = false;
|
||||
}
|
||||
}
|
||||
|
||||
// Check instance connections
|
||||
for (const [name, connection] of this.connections) {
|
||||
try {
|
||||
await connection.ping();
|
||||
details[`unique:${name}`] = true;
|
||||
} catch (error) {
|
||||
details[`unique:${name}`] = false;
|
||||
allHealthy = false;
|
||||
}
|
||||
}
|
||||
|
||||
return { healthy: allHealthy, details };
|
||||
}
|
||||
}
|
||||
|
||||
export default RedisConnectionManager;
|
||||
265
libs/cache/src/decorators/cacheable.ts
vendored
265
libs/cache/src/decorators/cacheable.ts
vendored
|
|
@ -1,265 +0,0 @@
|
|||
import { getLogger } from '@stock-bot/logger';
|
||||
import { CacheProvider } from '../types';
|
||||
import { CacheKeyGenerator } from '../key-generator';
|
||||
|
||||
const logger = getLogger('cache-decorator');
|
||||
|
||||
/**
|
||||
* Method decorator for automatic caching
|
||||
*/
|
||||
export function Cacheable(
|
||||
cacheProvider: CacheProvider,
|
||||
options: {
|
||||
keyGenerator?: (args: any[], target?: any, methodName?: string) => string;
|
||||
ttl?: number;
|
||||
skipFirstArg?: boolean; // Skip 'this' if it's the first argument
|
||||
} = {}
|
||||
) {
|
||||
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
|
||||
const originalMethod = descriptor.value;
|
||||
|
||||
descriptor.value = async function (...args: any[]) {
|
||||
try {
|
||||
// Generate cache key
|
||||
const key = options.keyGenerator
|
||||
? options.keyGenerator(args, target, propertyName)
|
||||
: generateDefaultKey(target.constructor.name, propertyName, args);
|
||||
|
||||
// Try to get from cache
|
||||
const cached = await cacheProvider.get(key);
|
||||
if (cached !== null) {
|
||||
logger.debug('Method cache hit', {
|
||||
class: target.constructor.name,
|
||||
method: propertyName,
|
||||
key
|
||||
});
|
||||
return cached;
|
||||
}
|
||||
|
||||
// Execute method and cache result
|
||||
const result = await originalMethod.apply(this, args);
|
||||
await cacheProvider.set(key, result, options.ttl);
|
||||
|
||||
logger.debug('Method executed and cached', {
|
||||
class: target.constructor.name,
|
||||
method: propertyName,
|
||||
key
|
||||
});
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
logger.error('Cache decorator error', {
|
||||
class: target.constructor.name,
|
||||
method: propertyName,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
|
||||
// Fallback to original method if caching fails
|
||||
return await originalMethod.apply(this, args);
|
||||
}
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache invalidation decorator
|
||||
*/
|
||||
export function CacheEvict(
|
||||
cacheProvider: CacheProvider,
|
||||
options: {
|
||||
keyGenerator?: (args: any[], target?: any, methodName?: string) => string | string[];
|
||||
evictBefore?: boolean; // Evict before method execution
|
||||
} = {}
|
||||
) {
|
||||
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
|
||||
const originalMethod = descriptor.value;
|
||||
|
||||
descriptor.value = async function (...args: any[]) {
|
||||
try {
|
||||
const keys = options.keyGenerator
|
||||
? options.keyGenerator(args, target, propertyName)
|
||||
: generateDefaultKey(target.constructor.name, propertyName, args);
|
||||
|
||||
const keysArray = Array.isArray(keys) ? keys : [keys];
|
||||
|
||||
if (options.evictBefore) {
|
||||
// Evict before method execution
|
||||
for (const key of keysArray) {
|
||||
await cacheProvider.del(key);
|
||||
}
|
||||
logger.debug('Cache evicted before method execution', {
|
||||
class: target.constructor.name,
|
||||
method: propertyName,
|
||||
keys: keysArray
|
||||
});
|
||||
}
|
||||
|
||||
// Execute method
|
||||
const result = await originalMethod.apply(this, args);
|
||||
|
||||
if (!options.evictBefore) {
|
||||
// Evict after method execution
|
||||
for (const key of keysArray) {
|
||||
await cacheProvider.del(key);
|
||||
}
|
||||
logger.debug('Cache evicted after method execution', {
|
||||
class: target.constructor.name,
|
||||
method: propertyName,
|
||||
keys: keysArray
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
logger.error('Cache evict decorator error', {
|
||||
class: target.constructor.name,
|
||||
method: propertyName,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
|
||||
// Continue with original method execution even if eviction fails
|
||||
return await originalMethod.apply(this, args);
|
||||
}
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache warming decorator - pre-populate cache with method results
|
||||
*/
|
||||
export function CacheWarm(
|
||||
cacheProvider: CacheProvider,
|
||||
options: {
|
||||
keyGenerator?: (args: any[], target?: any, methodName?: string) => string;
|
||||
ttl?: number;
|
||||
warmupArgs: any[][]; // Array of argument arrays to warm up
|
||||
}
|
||||
) {
|
||||
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
|
||||
const originalMethod = descriptor.value;
|
||||
|
||||
// Warmup cache when method is first accessed
|
||||
let warmed = false;
|
||||
|
||||
descriptor.value = async function (...args: any[]) {
|
||||
// Perform warmup if not done yet
|
||||
if (!warmed) {
|
||||
warmed = true;
|
||||
setImmediate(async () => {
|
||||
try {
|
||||
for (const warmupArgs of options.warmupArgs) {
|
||||
const key = options.keyGenerator
|
||||
? options.keyGenerator(warmupArgs, target, propertyName)
|
||||
: generateDefaultKey(target.constructor.name, propertyName, warmupArgs);
|
||||
|
||||
// Check if already cached
|
||||
const exists = await cacheProvider.exists(key);
|
||||
if (!exists) {
|
||||
const result = await originalMethod.apply(this, warmupArgs);
|
||||
await cacheProvider.set(key, result, options.ttl);
|
||||
}
|
||||
}
|
||||
logger.info('Cache warmed up', {
|
||||
class: target.constructor.name,
|
||||
method: propertyName,
|
||||
count: options.warmupArgs.length
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Cache warmup failed', {
|
||||
class: target.constructor.name,
|
||||
method: propertyName,
|
||||
error
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Execute normal cacheable logic
|
||||
const key = options.keyGenerator
|
||||
? options.keyGenerator(args, target, propertyName)
|
||||
: generateDefaultKey(target.constructor.name, propertyName, args);
|
||||
|
||||
const cached = await cacheProvider.get(key);
|
||||
if (cached !== null) {
|
||||
return cached;
|
||||
}
|
||||
|
||||
const result = await originalMethod.apply(this, args);
|
||||
await cacheProvider.set(key, result, options.ttl);
|
||||
|
||||
return result;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Trading-specific decorators
|
||||
*/
|
||||
|
||||
/**
|
||||
* Cache market data with appropriate TTL
|
||||
*/
|
||||
export function CacheMarketData(
|
||||
cacheProvider: CacheProvider,
|
||||
ttl: number = 300 // 5 minutes default
|
||||
) {
|
||||
return Cacheable(cacheProvider, {
|
||||
keyGenerator: (args) => {
|
||||
const [symbol, timeframe, date] = args;
|
||||
return CacheKeyGenerator.marketData(symbol, timeframe, date);
|
||||
},
|
||||
ttl
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache technical indicators
|
||||
*/
|
||||
export function CacheIndicator(
|
||||
cacheProvider: CacheProvider,
|
||||
ttl: number = 600 // 10 minutes default
|
||||
) {
|
||||
return Cacheable(cacheProvider, {
|
||||
keyGenerator: (args) => {
|
||||
const [symbol, indicator, period, data] = args;
|
||||
const dataHash = hashArray(data);
|
||||
return CacheKeyGenerator.indicator(symbol, indicator, period, dataHash);
|
||||
},
|
||||
ttl
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache strategy results
|
||||
*/
|
||||
export function CacheStrategy(
|
||||
cacheProvider: CacheProvider,
|
||||
ttl: number = 1800 // 30 minutes default
|
||||
) {
|
||||
return Cacheable(cacheProvider, {
|
||||
keyGenerator: (args) => {
|
||||
const [strategyName, symbol, timeframe] = args;
|
||||
return CacheKeyGenerator.strategy(strategyName, symbol, timeframe);
|
||||
},
|
||||
ttl
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper functions
|
||||
*/
|
||||
function generateDefaultKey(className: string, methodName: string, args: any[]): string {
|
||||
const argsHash = hashArray(args);
|
||||
return `method:${className}:${methodName}:${argsHash}`;
|
||||
}
|
||||
|
||||
function hashArray(arr: any[]): string {
|
||||
const str = JSON.stringify(arr);
|
||||
let hash = 0;
|
||||
for (let i = 0; i < str.length; i++) {
|
||||
const char = str.charCodeAt(i);
|
||||
hash = ((hash << 5) - hash) + char;
|
||||
hash = hash & hash; // Convert to 32-bit integer
|
||||
}
|
||||
return Math.abs(hash).toString(36);
|
||||
}
|
||||
83
libs/cache/src/index.ts
vendored
83
libs/cache/src/index.ts
vendored
|
|
@ -1,76 +1,50 @@
|
|||
import { dragonflyConfig } from '@stock-bot/config';
|
||||
import { RedisCache } from './providers/redis-cache';
|
||||
import { MemoryCache } from './providers/memory-cache';
|
||||
import { HybridCache } from './providers/hybrid-cache';
|
||||
import type { CacheProvider, CacheOptions, CacheConfig } from './types';
|
||||
import { RedisCache } from './redis-cache';
|
||||
import { RedisConnectionManager } from './connection-manager';
|
||||
import type { CacheProvider, CacheOptions } from './types';
|
||||
|
||||
/**
|
||||
* Factory for creating cache providers with smart defaults
|
||||
*
|
||||
* @param type 'redis' | 'memory' | 'hybrid' | 'auto'
|
||||
* @param options configuration for the cache
|
||||
* Create a Redis cache instance with trading-optimized defaults
|
||||
*/
|
||||
export function createCache(
|
||||
type: 'redis' | 'memory' | 'hybrid' | 'auto' = 'auto',
|
||||
options: CacheOptions = {}
|
||||
): CacheProvider {
|
||||
// Auto-detect best cache type based on environment
|
||||
if (type === 'auto') {
|
||||
try {
|
||||
// Try to use hybrid cache if Redis/Dragonfly is configured
|
||||
if (dragonflyConfig.DRAGONFLY_HOST) {
|
||||
type = 'hybrid';
|
||||
} else {
|
||||
type = 'memory';
|
||||
}
|
||||
} catch {
|
||||
// Fallback to memory if config is not available
|
||||
type = 'memory';
|
||||
}
|
||||
}
|
||||
export function createCache(options: Partial<CacheOptions> = {}): CacheProvider {
|
||||
const defaultOptions: CacheOptions = {
|
||||
keyPrefix: 'cache:',
|
||||
ttl: 3600, // 1 hour default
|
||||
enableMetrics: true,
|
||||
shared: true, // Default to shared connections
|
||||
...options
|
||||
};
|
||||
|
||||
switch (type) {
|
||||
case 'redis':
|
||||
return new RedisCache(options);
|
||||
case 'memory':
|
||||
return new MemoryCache(options);
|
||||
case 'hybrid':
|
||||
return new HybridCache(options);
|
||||
default:
|
||||
throw new Error(`Unknown cache type: ${type}`);
|
||||
}
|
||||
return new RedisCache(defaultOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a cache instance with trading-optimized defaults
|
||||
* Create a cache instance for trading data
|
||||
*/
|
||||
export function createTradingCache(options: Partial<CacheOptions> = {}): CacheProvider {
|
||||
const defaultOptions: CacheOptions = {
|
||||
keyPrefix: 'trading:',
|
||||
ttl: 3600, // 1 hour default
|
||||
memoryTTL: 300, // 5 minutes for memory cache
|
||||
maxMemoryItems: 2000, // More items for trading data
|
||||
enableMetrics: true,
|
||||
shared: true,
|
||||
...options
|
||||
};
|
||||
|
||||
return createCache('auto', defaultOptions);
|
||||
return new RedisCache(defaultOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a cache for market data with appropriate settings
|
||||
* Create a cache for market data with shorter TTL
|
||||
*/
|
||||
export function createMarketDataCache(options: Partial<CacheOptions> = {}): CacheProvider {
|
||||
const defaultOptions: CacheOptions = {
|
||||
keyPrefix: 'market:',
|
||||
ttl: 300, // 5 minutes for market data
|
||||
memoryTTL: 60, // 1 minute in memory
|
||||
maxMemoryItems: 5000, // Lots of market data
|
||||
enableMetrics: true,
|
||||
shared: true,
|
||||
...options
|
||||
};
|
||||
|
||||
return createCache('auto', defaultOptions);
|
||||
return new RedisCache(defaultOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -80,13 +54,12 @@ export function createIndicatorCache(options: Partial<CacheOptions> = {}): Cache
|
|||
const defaultOptions: CacheOptions = {
|
||||
keyPrefix: 'indicators:',
|
||||
ttl: 1800, // 30 minutes for indicators
|
||||
memoryTTL: 600, // 10 minutes in memory
|
||||
maxMemoryItems: 1000,
|
||||
enableMetrics: true,
|
||||
shared: true,
|
||||
...options
|
||||
};
|
||||
|
||||
return createCache('auto', defaultOptions);
|
||||
return new RedisCache(defaultOptions);
|
||||
}
|
||||
|
||||
// Export types and classes
|
||||
|
|
@ -99,20 +72,10 @@ export type {
|
|||
SerializationOptions
|
||||
} from './types';
|
||||
|
||||
export { RedisCache } from './providers/redis-cache';
|
||||
export { MemoryCache } from './providers/memory-cache';
|
||||
export { HybridCache } from './providers/hybrid-cache';
|
||||
|
||||
export { RedisCache } from './redis-cache';
|
||||
export { RedisConnectionManager } from './connection-manager';
|
||||
export { CacheKeyGenerator } from './key-generator';
|
||||
|
||||
export {
|
||||
Cacheable,
|
||||
CacheEvict,
|
||||
CacheWarm,
|
||||
CacheMarketData,
|
||||
CacheIndicator,
|
||||
CacheStrategy
|
||||
} from './decorators/cacheable';
|
||||
|
||||
// Default export for convenience
|
||||
export default createCache;
|
||||
294
libs/cache/src/providers/hybrid-cache.ts
vendored
294
libs/cache/src/providers/hybrid-cache.ts
vendored
|
|
@ -1,294 +0,0 @@
|
|||
import { getLogger } from '@stock-bot/logger';
|
||||
import { CacheProvider, CacheOptions, CacheStats } from '../types';
|
||||
import { RedisCache } from './redis-cache';
|
||||
import { MemoryCache } from './memory-cache';
|
||||
|
||||
/**
|
||||
* Hybrid cache provider that uses memory as L1 cache and Redis as L2 cache
|
||||
* Provides the best of both worlds: fast memory access and persistent Redis storage
|
||||
*/
|
||||
export class HybridCache implements CacheProvider {
|
||||
private memoryCache: MemoryCache;
|
||||
private redisCache: RedisCache;
|
||||
private logger = getLogger('hybrid-cache');
|
||||
private enableMetrics: boolean;
|
||||
private startTime = Date.now();
|
||||
|
||||
private stats: CacheStats = {
|
||||
hits: 0,
|
||||
misses: 0,
|
||||
errors: 0,
|
||||
hitRate: 0,
|
||||
total: 0,
|
||||
uptime: 0
|
||||
};
|
||||
|
||||
constructor(options: CacheOptions = {}) {
|
||||
this.enableMetrics = options.enableMetrics ?? true;
|
||||
|
||||
// Create L1 (memory) cache with shorter TTL
|
||||
this.memoryCache = new MemoryCache({
|
||||
...options,
|
||||
ttl: options.memoryTTL ?? 300, // 5 minutes for memory
|
||||
maxMemoryItems: options.maxMemoryItems ?? 1000,
|
||||
enableMetrics: false // We'll handle metrics at hybrid level
|
||||
});
|
||||
|
||||
// Create L2 (Redis) cache with longer TTL
|
||||
this.redisCache = new RedisCache({
|
||||
...options,
|
||||
enableMetrics: false // We'll handle metrics at hybrid level
|
||||
});
|
||||
|
||||
this.logger.info('Hybrid cache initialized', {
|
||||
memoryTTL: options.memoryTTL ?? 300,
|
||||
redisTTL: options.ttl ?? 3600,
|
||||
maxMemoryItems: options.maxMemoryItems ?? 1000
|
||||
});
|
||||
}
|
||||
|
||||
private updateStats(hit: boolean, error = false): void {
|
||||
if (!this.enableMetrics) return;
|
||||
|
||||
if (error) {
|
||||
this.stats.errors++;
|
||||
} else if (hit) {
|
||||
this.stats.hits++;
|
||||
} else {
|
||||
this.stats.misses++;
|
||||
}
|
||||
|
||||
this.stats.total = this.stats.hits + this.stats.misses;
|
||||
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
|
||||
this.stats.uptime = Date.now() - this.startTime;
|
||||
}
|
||||
|
||||
async get<T>(key: string): Promise<T | null> {
|
||||
try {
|
||||
// Try L1 cache first (memory)
|
||||
const memoryValue = await this.memoryCache.get<T>(key);
|
||||
if (memoryValue !== null) {
|
||||
this.updateStats(true);
|
||||
this.logger.debug('L1 cache hit', { key, hitRate: this.stats.hitRate });
|
||||
return memoryValue;
|
||||
}
|
||||
|
||||
// Try L2 cache (Redis)
|
||||
const redisValue = await this.redisCache.get<T>(key);
|
||||
if (redisValue !== null) {
|
||||
// Populate L1 cache for next access
|
||||
await this.memoryCache.set(key, redisValue);
|
||||
this.updateStats(true);
|
||||
this.logger.debug('L2 cache hit, populating L1', { key, hitRate: this.stats.hitRate });
|
||||
return redisValue;
|
||||
}
|
||||
|
||||
// Complete miss
|
||||
this.updateStats(false);
|
||||
this.logger.debug('Cache miss (both L1 and L2)', { key });
|
||||
return null;
|
||||
|
||||
} catch (error) {
|
||||
this.updateStats(false, true);
|
||||
this.logger.error('Hybrid cache get error', {
|
||||
key,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
|
||||
try {
|
||||
// Set in both caches
|
||||
const memoryPromise = this.memoryCache.set(key, value, Math.min(ttl ?? 300, 300));
|
||||
const redisPromise = this.redisCache.set(key, value, ttl);
|
||||
|
||||
await Promise.allSettled([memoryPromise, redisPromise]);
|
||||
this.logger.debug('Cache set (both L1 and L2)', { key, ttl });
|
||||
|
||||
} catch (error) {
|
||||
this.updateStats(false, true);
|
||||
this.logger.error('Hybrid cache set error', {
|
||||
key,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async del(key: string): Promise<void> {
|
||||
try {
|
||||
// Delete from both caches
|
||||
const memoryPromise = this.memoryCache.del(key);
|
||||
const redisPromise = this.redisCache.del(key);
|
||||
|
||||
await Promise.allSettled([memoryPromise, redisPromise]);
|
||||
this.logger.debug('Cache delete (both L1 and L2)', { key });
|
||||
|
||||
} catch (error) {
|
||||
this.updateStats(false, true);
|
||||
this.logger.error('Hybrid cache delete error', {
|
||||
key,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async exists(key: string): Promise<boolean> {
|
||||
try {
|
||||
// Check memory first, then Redis
|
||||
const memoryExists = await this.memoryCache.exists(key);
|
||||
if (memoryExists) return true;
|
||||
|
||||
return await this.redisCache.exists(key);
|
||||
|
||||
} catch (error) {
|
||||
this.updateStats(false, true);
|
||||
this.logger.error('Hybrid cache exists error', {
|
||||
key,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async clear(): Promise<void> {
|
||||
try {
|
||||
// Clear both caches
|
||||
const memoryPromise = this.memoryCache.clear();
|
||||
const redisPromise = this.redisCache.clear();
|
||||
|
||||
await Promise.allSettled([memoryPromise, redisPromise]);
|
||||
this.logger.info('Cache cleared (both L1 and L2)');
|
||||
|
||||
} catch (error) {
|
||||
this.updateStats(false, true);
|
||||
this.logger.error('Hybrid cache clear error', {
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async health(): Promise<boolean> {
|
||||
try {
|
||||
const memoryHealthy = await this.memoryCache.health();
|
||||
const redisHealthy = await this.redisCache.health();
|
||||
|
||||
// Hybrid cache is healthy if at least one cache is working
|
||||
const isHealthy = memoryHealthy || redisHealthy;
|
||||
|
||||
this.logger.debug('Hybrid cache health check', {
|
||||
memory: memoryHealthy,
|
||||
redis: redisHealthy,
|
||||
overall: isHealthy
|
||||
});
|
||||
|
||||
return isHealthy;
|
||||
} catch (error) {
|
||||
this.logger.error('Hybrid cache health check failed', error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
getStats(): CacheStats {
|
||||
return {
|
||||
...this.stats,
|
||||
uptime: Date.now() - this.startTime
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get detailed stats for both cache layers
|
||||
*/
|
||||
getDetailedStats() {
|
||||
return {
|
||||
hybrid: this.getStats(),
|
||||
memory: this.memoryCache.getStats(),
|
||||
redis: this.redisCache.getStats()
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Warm up the memory cache with frequently accessed keys from Redis
|
||||
*/
|
||||
async warmupMemoryCache(keys: string[]): Promise<void> {
|
||||
this.logger.info('Starting memory cache warmup', { keyCount: keys.length });
|
||||
|
||||
let warmed = 0;
|
||||
for (const key of keys) {
|
||||
try {
|
||||
const value = await this.redisCache.get(key);
|
||||
if (value !== null) {
|
||||
await this.memoryCache.set(key, value);
|
||||
warmed++;
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn('Failed to warm up key', { key, error });
|
||||
}
|
||||
}
|
||||
|
||||
this.logger.info('Memory cache warmup completed', {
|
||||
requested: keys.length,
|
||||
warmed
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync memory cache with Redis for specific keys
|
||||
*/
|
||||
async syncCaches(keys: string[]): Promise<void> {
|
||||
for (const key of keys) {
|
||||
try {
|
||||
const redisValue = await this.redisCache.get(key);
|
||||
if (redisValue !== null) {
|
||||
await this.memoryCache.set(key, redisValue);
|
||||
} else {
|
||||
await this.memoryCache.del(key);
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn('Failed to sync key', { key, error });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Close connections for both caches
|
||||
*/
|
||||
async disconnect(): Promise<void> {
|
||||
await this.redisCache.disconnect();
|
||||
this.logger.info('Hybrid cache disconnected');
|
||||
}
|
||||
|
||||
async waitForReady(timeout: number = 5000): Promise<void> {
|
||||
// Memory cache is always ready, only need to wait for Redis
|
||||
await this.redisCache.waitForReady(timeout);
|
||||
}
|
||||
|
||||
isReady(): boolean {
|
||||
// Memory cache is always ready, check Redis status
|
||||
return this.memoryCache.isReady() && this.redisCache.isReady();
|
||||
}
|
||||
|
||||
/**
|
||||
* Manually trigger a refresh of the Redis cache for a specific key
|
||||
* Useful for updating the cache after a data change
|
||||
*/
|
||||
async refresh(key: string): Promise<void> {
|
||||
try {
|
||||
// Get the current value from memory (L1)
|
||||
const currentValue = await this.memoryCache.get(key);
|
||||
if (currentValue !== null) {
|
||||
// If exists in memory, update Redis (L2)
|
||||
await this.redisCache.set(key, currentValue);
|
||||
this.logger.info('Cache refresh (L2)', { key });
|
||||
} else {
|
||||
this.logger.debug('Cache refresh skipped, key not found in L1', { key });
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.error('Cache refresh error', {
|
||||
key,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
280
libs/cache/src/providers/memory-cache.ts
vendored
280
libs/cache/src/providers/memory-cache.ts
vendored
|
|
@ -1,280 +0,0 @@
|
|||
import { getLogger } from '@stock-bot/logger';
|
||||
import { CacheProvider, CacheOptions, CacheStats } from '../types';
|
||||
|
||||
interface CacheEntry<T> {
|
||||
value: T;
|
||||
expiry: number;
|
||||
accessed: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* In-memory cache provider with LRU eviction and comprehensive metrics
|
||||
*/
|
||||
export class MemoryCache implements CacheProvider {
|
||||
private store = new Map<string, CacheEntry<any>>();
|
||||
private logger = getLogger('memory-cache');
|
||||
private defaultTTL: number;
|
||||
private keyPrefix: string;
|
||||
private maxItems: number;
|
||||
private enableMetrics: boolean;
|
||||
private startTime = Date.now();
|
||||
|
||||
private stats: CacheStats = {
|
||||
hits: 0,
|
||||
misses: 0,
|
||||
errors: 0,
|
||||
hitRate: 0,
|
||||
total: 0,
|
||||
uptime: 0
|
||||
};
|
||||
|
||||
constructor(options: CacheOptions = {}) {
|
||||
this.defaultTTL = options.ttl ?? 3600; // 1 hour default
|
||||
this.keyPrefix = options.keyPrefix ?? 'cache:';
|
||||
this.maxItems = options.maxMemoryItems ?? 1000;
|
||||
this.enableMetrics = options.enableMetrics ?? true;
|
||||
|
||||
this.logger.info('Memory cache initialized', {
|
||||
maxItems: this.maxItems,
|
||||
defaultTTL: this.defaultTTL,
|
||||
enableMetrics: this.enableMetrics
|
||||
});
|
||||
|
||||
// Cleanup expired entries every 5 minutes
|
||||
setInterval(() => this.cleanup(), 5 * 60 * 1000);
|
||||
}
|
||||
|
||||
private getKey(key: string): string {
|
||||
return `${this.keyPrefix}${key}`;
|
||||
}
|
||||
|
||||
private updateStats(hit: boolean, error = false): void {
|
||||
if (!this.enableMetrics) return;
|
||||
|
||||
if (error) {
|
||||
this.stats.errors++;
|
||||
} else if (hit) {
|
||||
this.stats.hits++;
|
||||
} else {
|
||||
this.stats.misses++;
|
||||
}
|
||||
|
||||
this.stats.total = this.stats.hits + this.stats.misses;
|
||||
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
|
||||
this.stats.uptime = Date.now() - this.startTime;
|
||||
}
|
||||
|
||||
private cleanup(): void {
|
||||
const now = Date.now();
|
||||
let cleaned = 0;
|
||||
|
||||
for (const [key, entry] of this.store.entries()) {
|
||||
if (entry.expiry < now) {
|
||||
this.store.delete(key);
|
||||
cleaned++;
|
||||
}
|
||||
}
|
||||
|
||||
if (cleaned > 0) {
|
||||
this.logger.debug('Cleaned expired entries', {
|
||||
cleaned,
|
||||
remaining: this.store.size
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private evictLRU(): void {
|
||||
if (this.store.size <= this.maxItems) return;
|
||||
|
||||
// Find least recently accessed item
|
||||
let oldestKey = '';
|
||||
let oldestAccess = Date.now();
|
||||
|
||||
for (const [key, entry] of this.store.entries()) {
|
||||
if (entry.accessed < oldestAccess) {
|
||||
oldestAccess = entry.accessed;
|
||||
oldestKey = key;
|
||||
}
|
||||
}
|
||||
|
||||
if (oldestKey) {
|
||||
this.store.delete(oldestKey);
|
||||
this.logger.debug('Evicted LRU entry', { key: oldestKey });
|
||||
}
|
||||
}
|
||||
|
||||
async get<T>(key: string): Promise<T | null> {
|
||||
try {
|
||||
const fullKey = this.getKey(key);
|
||||
const entry = this.store.get(fullKey);
|
||||
|
||||
if (!entry) {
|
||||
this.updateStats(false);
|
||||
this.logger.debug('Cache miss', { key });
|
||||
return null;
|
||||
}
|
||||
|
||||
const now = Date.now();
|
||||
if (entry.expiry < now) {
|
||||
this.store.delete(fullKey);
|
||||
this.updateStats(false);
|
||||
this.logger.debug('Cache miss (expired)', { key });
|
||||
return null;
|
||||
}
|
||||
|
||||
// Update access time for LRU
|
||||
entry.accessed = now;
|
||||
this.updateStats(true);
|
||||
this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate });
|
||||
|
||||
return entry.value;
|
||||
} catch (error) {
|
||||
this.updateStats(false, true);
|
||||
this.logger.error('Cache get error', {
|
||||
key,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
|
||||
try {
|
||||
const fullKey = this.getKey(key);
|
||||
const now = Date.now();
|
||||
const expiry = now + 1000 * (ttl ?? this.defaultTTL);
|
||||
|
||||
// Evict if necessary
|
||||
this.evictLRU();
|
||||
|
||||
this.store.set(fullKey, {
|
||||
value,
|
||||
expiry,
|
||||
accessed: now
|
||||
});
|
||||
|
||||
this.logger.debug('Cache set', { key, ttl: ttl ?? this.defaultTTL });
|
||||
} catch (error) {
|
||||
this.updateStats(false, true);
|
||||
this.logger.error('Cache set error', {
|
||||
key,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async del(key: string): Promise<void> {
|
||||
try {
|
||||
const fullKey = this.getKey(key);
|
||||
const deleted = this.store.delete(fullKey);
|
||||
this.logger.debug('Cache delete', { key, deleted });
|
||||
} catch (error) {
|
||||
this.updateStats(false, true);
|
||||
this.logger.error('Cache delete error', {
|
||||
key,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async exists(key: string): Promise<boolean> {
|
||||
try {
|
||||
const fullKey = this.getKey(key);
|
||||
const entry = this.store.get(fullKey);
|
||||
|
||||
if (!entry) return false;
|
||||
|
||||
// Check if expired
|
||||
if (entry.expiry < Date.now()) {
|
||||
this.store.delete(fullKey);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
this.updateStats(false, true);
|
||||
this.logger.error('Cache exists error', {
|
||||
key,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async clear(): Promise<void> {
|
||||
try {
|
||||
const size = this.store.size;
|
||||
this.store.clear();
|
||||
this.logger.info('Cache cleared', { entriesDeleted: size });
|
||||
} catch (error) {
|
||||
this.updateStats(false, true);
|
||||
this.logger.error('Cache clear error', {
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
async health(): Promise<boolean> {
|
||||
try {
|
||||
// Simple health check - try to set and get a test value
|
||||
await this.set('__health_check__', 'ok', 1);
|
||||
const result = await this.get('__health_check__');
|
||||
await this.del('__health_check__');
|
||||
return result === 'ok';
|
||||
} catch (error) {
|
||||
this.logger.error('Memory cache health check failed', error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
getStats(): CacheStats {
|
||||
return {
|
||||
...this.stats,
|
||||
uptime: Date.now() - this.startTime
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get additional memory cache specific stats
|
||||
*/
|
||||
getMemoryStats() {
|
||||
return {
|
||||
...this.getStats(),
|
||||
entries: this.store.size,
|
||||
maxItems: this.maxItems,
|
||||
memoryUsage: this.estimateMemoryUsage()
|
||||
};
|
||||
}
|
||||
|
||||
private estimateMemoryUsage(): number {
|
||||
// Rough estimation of memory usage in bytes
|
||||
let bytes = 0;
|
||||
for (const [key, entry] of this.store.entries()) {
|
||||
bytes += key.length * 2; // UTF-16 characters
|
||||
bytes += JSON.stringify(entry.value).length * 2;
|
||||
bytes += 24; // Overhead for entry object
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
|
||||
async waitForReady(timeout: number = 5000): Promise<void> {
|
||||
// Memory cache is always ready immediately
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
isReady(): boolean {
|
||||
// Memory cache is always ready
|
||||
return true;
|
||||
}
|
||||
|
||||
private getMemoryUsage(): number {
|
||||
// Rough estimation of memory usage in bytes
|
||||
let bytes = 0;
|
||||
for (const [key, entry] of this.store.entries()) {
|
||||
bytes += key.length * 2; // UTF-16 characters
|
||||
bytes += JSON.stringify(entry.value).length * 2;
|
||||
bytes += 24; // Overhead for entry object
|
||||
}
|
||||
return bytes;
|
||||
}
|
||||
}
|
||||
|
|
@ -1,304 +1,240 @@
|
|||
import Redis from 'ioredis';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { dragonflyConfig } from '@stock-bot/config';
|
||||
import { CacheProvider, CacheOptions, CacheStats } from '../types';
|
||||
|
||||
/**
 * Redis-based cache provider with comprehensive error handling and metrics.
 *
 * Opens its own ioredis connection built from the project's dragonflyConfig
 * and degrades gracefully: when the connection is down, every operation logs
 * and returns a fallback value instead of throwing.
 */
export class RedisCache implements CacheProvider {
  private redis: Redis;                     // dedicated ioredis connection owned by this instance
  private logger = getLogger('redis-cache');
  private defaultTTL: number;               // seconds
  private keyPrefix: string;
  private enableMetrics: boolean;
  private isConnected = false;              // tracked via connection lifecycle events
  private startTime = Date.now();           // for the uptime stat

  // Running hit/miss/error counters exposed via getStats().
  private stats: CacheStats = {
    hits: 0,
    misses: 0,
    errors: 0,
    hitRate: 0,
    total: 0,
    uptime: 0
  };

  /**
   * @param options.ttl           default TTL in seconds (default 3600)
   * @param options.keyPrefix     prefix applied to every key (default 'cache:')
   * @param options.enableMetrics whether to track hit/miss stats (default true)
   * @param options.name          base for the CLIENT SETNAME used in monitoring
   */
  constructor(options: CacheOptions = {}) {
    this.defaultTTL = options.ttl ?? 3600; // 1 hour default
    this.keyPrefix = options.keyPrefix ?? 'cache:';
    this.enableMetrics = options.enableMetrics ?? true;

    // Generate a connection name for monitoring (timestamp keeps it unique).
    const baseName = options.name || this.keyPrefix.replace(':', '-').replace(/[^a-zA-Z0-9-]/g, '');
    const connectionName = `${baseName}-${Date.now()}`;

    // All connection parameters come from the shared dragonfly config module.
    const redisConfig = {
      host: dragonflyConfig.DRAGONFLY_HOST,
      port: dragonflyConfig.DRAGONFLY_PORT,
      password: dragonflyConfig.DRAGONFLY_PASSWORD || undefined,
      username: dragonflyConfig.DRAGONFLY_USERNAME || undefined,
      db: dragonflyConfig.DRAGONFLY_DATABASE,
      maxRetriesPerRequest: dragonflyConfig.DRAGONFLY_MAX_RETRIES,
      // NOTE(review): 'retryDelayOnFailover' is not a documented ioredis option — confirm it has any effect
      retryDelayOnFailover: dragonflyConfig.DRAGONFLY_RETRY_DELAY,
      connectTimeout: dragonflyConfig.DRAGONFLY_CONNECT_TIMEOUT,
      commandTimeout: dragonflyConfig.DRAGONFLY_COMMAND_TIMEOUT,
      keepAlive: dragonflyConfig.DRAGONFLY_ENABLE_KEEPALIVE ? dragonflyConfig.DRAGONFLY_KEEPALIVE_INTERVAL * 1000 : 0,
      connectionName, // Add connection name for monitoring
      // TLS block is only spread in when TLS is enabled in config.
      ...(dragonflyConfig.DRAGONFLY_TLS && {
        tls: {
          cert: dragonflyConfig.DRAGONFLY_TLS_CERT_FILE || undefined,
          key: dragonflyConfig.DRAGONFLY_TLS_KEY_FILE || undefined,
          ca: dragonflyConfig.DRAGONFLY_TLS_CA_FILE || undefined,
          rejectUnauthorized: !dragonflyConfig.DRAGONFLY_TLS_SKIP_VERIFY,
        }
      })
    };

    this.redis = new Redis(redisConfig);
    this.setupEventHandlers();
  }

  /** Wire connection lifecycle events to the connected flag, stats and logs. */
  private setupEventHandlers(): void {
    // NOTE(review): isConnected is set on 'connect' (TCP established), before
    // 'ready' (handshake complete) — commands issued in that window may fail.
    this.redis.on('connect', () => {
      this.isConnected = true;
      this.logger.info('Redis cache connected', {
        host: dragonflyConfig.DRAGONFLY_HOST,
        port: dragonflyConfig.DRAGONFLY_PORT,
        db: dragonflyConfig.DRAGONFLY_DATABASE
      });
    });

    this.redis.on('ready', () => {
      this.logger.info('Redis cache ready for commands');
    });

    this.redis.on('error', (error) => {
      this.isConnected = false;
      this.stats.errors++;
      this.logger.error('Redis cache connection error', { error: error.message });
    });

    this.redis.on('close', () => {
      this.isConnected = false;
      this.logger.warn('Redis cache connection closed');
    });

    this.redis.on('reconnecting', () => {
      this.logger.info('Redis cache reconnecting...');
    });
  }

  private getKey(key: string): string {
    return `${this.keyPrefix}${key}`;
  }

  /** Update hit/miss/error counters and derived stats; no-op when metrics are disabled. */
  private updateStats(hit: boolean, error = false): void {
    if (!this.enableMetrics) return;

    if (error) {
      this.stats.errors++;
    } else if (hit) {
      this.stats.hits++;
    } else {
      this.stats.misses++;
    }

    this.stats.total = this.stats.hits + this.stats.misses;
    this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
    this.stats.uptime = Date.now() - this.startTime;
  }

  /**
   * Run a Redis operation, returning `fallback` (and recording an error)
   * when the connection is down or the operation throws.
   */
  private async safeExecute<T>(
    operation: () => Promise<T>,
    fallback: T,
    operationName: string
  ): Promise<T> {
    if (!this.isConnected) {
      this.logger.warn(`Redis not connected for ${operationName}, using fallback`);
      this.updateStats(false, true);
      return fallback;
    }

    try {
      return await operation();
    } catch (error) {
      this.logger.error(`Redis ${operationName} failed`, {
        error: error instanceof Error ? error.message : String(error)
      });
      this.updateStats(false, true);
      return fallback;
    }
  }

  /**
   * Look up a key. Values are JSON-parsed when possible; otherwise the raw
   * string is returned. Misses and failures both yield null.
   */
  async get<T>(key: string): Promise<T | null> {
    return this.safeExecute(
      async () => {
        const fullKey = this.getKey(key);
        const value = await this.redis.get(fullKey);

        if (value === null) {
          this.updateStats(false);
          this.logger.debug('Cache miss', { key });
          return null;
        }

        this.updateStats(true);
        this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate });

        try {
          return JSON.parse(value) as T;
        } catch {
          // Return as-is if not valid JSON
          return value as unknown as T;
        }
      },
      null,
      'get'
    );
  }

  /** Store a value with SETEX; non-strings are JSON-serialized. TTL is in seconds. */
  async set<T>(key: string, value: T, ttl?: number): Promise<void> {
    await this.safeExecute(
      async () => {
        const fullKey = this.getKey(key);
        const serialized = typeof value === 'string' ? value : JSON.stringify(value);
        const expiry = ttl ?? this.defaultTTL;

        await this.redis.setex(fullKey, expiry, serialized);
        this.logger.debug('Cache set', { key, ttl: expiry });
      },
      undefined,
      'set'
    );
  }

  /** Delete a key; a no-op if the key is absent. */
  async del(key: string): Promise<void> {
    await this.safeExecute(
      async () => {
        const fullKey = this.getKey(key);
        await this.redis.del(fullKey);
        this.logger.debug('Cache delete', { key });
      },
      undefined,
      'del'
    );
  }

  /** True iff the key currently exists in Redis (false on error/disconnect). */
  async exists(key: string): Promise<boolean> {
    return this.safeExecute(
      async () => {
        const fullKey = this.getKey(key);
        const result = await this.redis.exists(fullKey);
        return result === 1;
      },
      false,
      'exists'
    );
  }

  /**
   * Delete every key under this cache's prefix.
   * NOTE(review): KEYS is O(N) and blocks the server; a SCAN loop would be
   * safer on large keyspaces.
   */
  async clear(): Promise<void> {
    await this.safeExecute(
      async () => {
        const pattern = `${this.keyPrefix}*`;
        const keys = await this.redis.keys(pattern);
        if (keys.length > 0) {
          await this.redis.del(...keys);
          this.logger.info('Cache cleared', { keysDeleted: keys.length });
        }
      },
      undefined,
      'clear'
    );
  }

  /** Health = PING answered with PONG while the connection flag is up. */
  async health(): Promise<boolean> {
    try {
      const pong = await this.redis.ping();
      return pong === 'PONG' && this.isConnected;
    } catch (error) {
      this.logger.error('Redis health check failed', error);
      return false;
    }
  }

  /** Snapshot of hit/miss counters with a freshly computed uptime. */
  getStats(): CacheStats {
    return {
      ...this.stats,
      uptime: Date.now() - this.startTime
    };
  }

  /**
   * Trading-specific convenience methods
   */
  // Cache OHLCV-style rows under market:<symbol>:<timeframe>; default TTL 5 minutes.
  async cacheMarketData(symbol: string, timeframe: string, data: any[], ttl = 300): Promise<void> {
    const key = `market:${symbol}:${timeframe}`;
    await this.set(key, data, ttl);
  }

  /** Fetch previously cached market data for a symbol/timeframe pair. */
  async getMarketData<T>(symbol: string, timeframe: string): Promise<T | null> {
    const key = `market:${symbol}:${timeframe}`;
    return this.get<T>(key);
  }

  /** Cache an indicator series under indicator:<symbol>:<name>:<period>; default TTL 10 minutes. */
  async cacheIndicator(
    symbol: string,
    indicator: string,
    period: number,
    data: number[],
    ttl = 600
  ): Promise<void> {
    const key = `indicator:${symbol}:${indicator}:${period}`;
    await this.set(key, data, ttl);
  }

  /** Fetch a previously cached indicator series. */
  async getIndicator(symbol: string, indicator: string, period: number): Promise<number[] | null> {
    const key = `indicator:${symbol}:${indicator}:${period}`;
    return this.get<number[]>(key);
  }

  /**
   * Resolve when the connection reaches 'ready', or reject after `timeout` ms
   * or on a connection error. Listeners are removed on every exit path.
   */
  async waitForReady(timeout: number = 5000): Promise<void> {
    if (this.isConnected) {
      return Promise.resolve();
    }

    return new Promise((resolve, reject) => {
      const timeoutId = setTimeout(() => {
        reject(new Error(`Redis cache connection timeout after ${timeout}ms`));
      }, timeout);

      const onReady = () => {
        clearTimeout(timeoutId);
        this.redis.off('ready', onReady);
        this.redis.off('error', onError);
        resolve();
      };

      const onError = (error: Error) => {
        clearTimeout(timeoutId);
        this.redis.off('ready', onReady);
        this.redis.off('error', onError);
        reject(new Error(`Redis cache connection failed: ${error.message}`));
      };

      // Fast path: already ready by the time we got here.
      if (this.redis.status === 'ready') {
        clearTimeout(timeoutId);
        resolve();
      } else {
        this.redis.once('ready', onReady);
        this.redis.once('error', onError);
      }
    });
  }

  /** True iff connected and the ioredis handshake has completed. */
  isReady(): boolean {
    return this.isConnected && this.redis.status === 'ready';
  }

  /**
   * Close the Redis connection
   */
  async disconnect(): Promise<void> {
    await this.redis.quit();
    this.logger.info('Redis cache disconnected');
  }
}
|
||||
import Redis from 'ioredis';
|
||||
import { getLogger } from '@stock-bot/logger';
|
||||
import { CacheProvider, CacheOptions, CacheStats } from './types';
|
||||
import { RedisConnectionManager } from './connection-manager';
|
||||
|
||||
/**
|
||||
* Simplified Redis-based cache provider using connection manager
|
||||
*/
|
||||
export class RedisCache implements CacheProvider {
|
||||
private redis: Redis;
|
||||
private logger = getLogger('redis-cache');
|
||||
private defaultTTL: number;
|
||||
private keyPrefix: string;
|
||||
private enableMetrics: boolean;
|
||||
private isConnected = false;
|
||||
private startTime = Date.now();
|
||||
private connectionManager: RedisConnectionManager;
|
||||
|
||||
private stats: CacheStats = {
|
||||
hits: 0,
|
||||
misses: 0,
|
||||
errors: 0,
|
||||
hitRate: 0,
|
||||
total: 0,
|
||||
uptime: 0
|
||||
};
|
||||
|
||||
constructor(options: CacheOptions = {}) {
|
||||
this.defaultTTL = options.ttl ?? 3600; // 1 hour default
|
||||
this.keyPrefix = options.keyPrefix ?? 'cache:';
|
||||
this.enableMetrics = options.enableMetrics ?? true;
|
||||
|
||||
// Get connection manager instance
|
||||
this.connectionManager = RedisConnectionManager.getInstance();
|
||||
|
||||
// Generate connection name based on cache type
|
||||
const baseName = options.name || this.keyPrefix.replace(':', '').replace(/[^a-zA-Z0-9]/g, '').toUpperCase() || 'CACHE';
|
||||
|
||||
// Get Redis connection (shared by default for cache)
|
||||
this.redis = this.connectionManager.getConnection({
|
||||
name: `${baseName}-SERVICE`,
|
||||
singleton: options.shared ?? true, // Default to shared connection for cache
|
||||
});
|
||||
|
||||
this.setupEventHandlers();
|
||||
}
|
||||
|
||||
private setupEventHandlers(): void {
|
||||
this.redis.on('connect', () => {
|
||||
this.logger.info('Redis cache connected');
|
||||
});
|
||||
|
||||
this.redis.on('ready', () => {
|
||||
this.isConnected = true;
|
||||
this.logger.info('Redis cache ready');
|
||||
});
|
||||
|
||||
this.redis.on('error', (error: any) => {
|
||||
this.isConnected = false;
|
||||
this.logger.error('Redis cache connection error', { error: error.message });
|
||||
});
|
||||
|
||||
this.redis.on('close', () => {
|
||||
this.isConnected = false;
|
||||
this.logger.warn('Redis cache connection closed');
|
||||
});
|
||||
|
||||
this.redis.on('reconnecting', () => {
|
||||
this.logger.info('Redis cache reconnecting...');
|
||||
});
|
||||
}
|
||||
|
||||
private getKey(key: string): string {
|
||||
return `${this.keyPrefix}${key}`;
|
||||
}
|
||||
|
||||
private updateStats(hit: boolean, error = false): void {
|
||||
if (!this.enableMetrics) return;
|
||||
|
||||
if (error) {
|
||||
this.stats.errors++;
|
||||
} else if (hit) {
|
||||
this.stats.hits++;
|
||||
} else {
|
||||
this.stats.misses++;
|
||||
}
|
||||
|
||||
this.stats.total = this.stats.hits + this.stats.misses;
|
||||
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
|
||||
this.stats.uptime = Date.now() - this.startTime;
|
||||
}
|
||||
|
||||
private async safeExecute<T>(
|
||||
operation: () => Promise<T>,
|
||||
fallback: T,
|
||||
operationName: string
|
||||
): Promise<T> {
|
||||
try {
|
||||
if (!this.isConnected) {
|
||||
this.logger.warn(`Redis not connected for ${operationName}, using fallback`);
|
||||
this.updateStats(false, true);
|
||||
return fallback;
|
||||
}
|
||||
return await operation();
|
||||
} catch (error) {
|
||||
this.logger.error(`Redis ${operationName} failed`, {
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
this.updateStats(false, true);
|
||||
return fallback;
|
||||
}
|
||||
}
|
||||
|
||||
async get<T>(key: string): Promise<T | null> {
|
||||
return this.safeExecute(
|
||||
async () => {
|
||||
const fullKey = this.getKey(key);
|
||||
const value = await this.redis.get(fullKey);
|
||||
|
||||
if (value === null) {
|
||||
this.updateStats(false);
|
||||
this.logger.debug('Cache miss', { key });
|
||||
return null;
|
||||
}
|
||||
|
||||
this.updateStats(true);
|
||||
this.logger.debug('Cache hit', { key });
|
||||
|
||||
try {
|
||||
return JSON.parse(value) as T;
|
||||
} catch {
|
||||
// If parsing fails, return as string
|
||||
return value as unknown as T;
|
||||
}
|
||||
},
|
||||
null,
|
||||
'get'
|
||||
);
|
||||
}
|
||||
|
||||
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
|
||||
await this.safeExecute(
|
||||
async () => {
|
||||
const fullKey = this.getKey(key);
|
||||
const expiry = ttl ?? this.defaultTTL;
|
||||
const serialized = typeof value === 'string' ? value : JSON.stringify(value);
|
||||
|
||||
await this.redis.setex(fullKey, expiry, serialized);
|
||||
this.logger.debug('Cache set', { key, ttl: expiry });
|
||||
},
|
||||
undefined,
|
||||
'set'
|
||||
);
|
||||
}
|
||||
|
||||
async del(key: string): Promise<void> {
|
||||
await this.safeExecute(
|
||||
async () => {
|
||||
const fullKey = this.getKey(key);
|
||||
await this.redis.del(fullKey);
|
||||
this.logger.debug('Cache delete', { key });
|
||||
},
|
||||
undefined,
|
||||
'del'
|
||||
);
|
||||
}
|
||||
|
||||
async exists(key: string): Promise<boolean> {
|
||||
return this.safeExecute(
|
||||
async () => {
|
||||
const fullKey = this.getKey(key);
|
||||
const result = await this.redis.exists(fullKey);
|
||||
return result === 1;
|
||||
},
|
||||
false,
|
||||
'exists'
|
||||
);
|
||||
}
|
||||
|
||||
async clear(): Promise<void> {
|
||||
await this.safeExecute(
|
||||
async () => {
|
||||
const pattern = `${this.keyPrefix}*`;
|
||||
const keys = await this.redis.keys(pattern);
|
||||
if (keys.length > 0) {
|
||||
await this.redis.del(...keys);
|
||||
this.logger.info('Cache cleared', { keysDeleted: keys.length });
|
||||
}
|
||||
},
|
||||
undefined,
|
||||
'clear'
|
||||
);
|
||||
}
|
||||
|
||||
async health(): Promise<boolean> {
|
||||
try {
|
||||
const pong = await this.redis.ping();
|
||||
return pong === 'PONG';
|
||||
} catch (error) {
|
||||
this.logger.error('Redis health check failed', {
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
getStats(): CacheStats {
|
||||
return {
|
||||
...this.stats,
|
||||
uptime: Date.now() - this.startTime
|
||||
};
|
||||
}
|
||||
|
||||
async waitForReady(timeout = 5000): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
if (this.redis.status === 'ready') {
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
const timeoutId = setTimeout(() => {
|
||||
reject(new Error(`Redis connection timeout after ${timeout}ms`));
|
||||
}, timeout);
|
||||
|
||||
this.redis.once('ready', () => {
|
||||
clearTimeout(timeoutId);
|
||||
resolve();
|
||||
});
|
||||
|
||||
this.redis.once('error', (error) => {
|
||||
clearTimeout(timeoutId);
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
isReady(): boolean {
|
||||
return this.isConnected && this.redis.status === 'ready';
|
||||
}
|
||||
}
|
||||
8
libs/cache/src/types.ts
vendored
8
libs/cache/src/types.ts
vendored
|
|
@ -24,9 +24,8 @@ export interface CacheOptions {
|
|||
ttl?: number;
|
||||
keyPrefix?: string;
|
||||
enableMetrics?: boolean;
|
||||
maxMemoryItems?: number;
|
||||
memoryTTL?: number;
|
||||
name?: string; // Add name for connection identification
|
||||
name?: string; // Name for connection identification
|
||||
shared?: boolean; // Whether to use shared connection
|
||||
}
|
||||
|
||||
export interface CacheStats {
|
||||
|
|
@ -39,10 +38,9 @@ export interface CacheStats {
|
|||
}
|
||||
|
||||
export interface CacheConfig {
|
||||
type: 'redis' | 'memory' | 'hybrid';
|
||||
type: 'redis';
|
||||
keyPrefix?: string;
|
||||
defaultTTL?: number;
|
||||
maxMemoryItems?: number;
|
||||
enableMetrics?: boolean;
|
||||
compression?: boolean;
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue