added cache and started fixing data-service

This commit is contained in:
Bojan Kucera 2025-06-05 08:27:06 -04:00
parent 3fc123eca3
commit d0e8fd9e3f
15 changed files with 1761 additions and 98 deletions

265
libs/cache/src/decorators/cacheable.ts vendored Normal file
View file

@ -0,0 +1,265 @@
import { createLogger } from '@stock-bot/logger';
import { CacheProvider } from '../types';
import { CacheKeyGenerator } from '../key-generator';
const logger = createLogger('cache-decorator');
/**
 * Method decorator for automatic caching.
 *
 * Looks the key up in the cache before invoking the method; on a miss the
 * method runs exactly once and its result is stored. All cache failures are
 * logged and swallowed so caching can never break — or double-execute — the
 * decorated method; errors thrown by the method itself propagate unchanged.
 */
export function Cacheable(
  cacheProvider: CacheProvider,
  options: {
    keyGenerator?: (args: any[], target?: any, methodName?: string) => string;
    ttl?: number;
    skipFirstArg?: boolean; // Skip 'this' if it's the first argument
  } = {}
) {
  return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
    const originalMethod = descriptor.value;
    descriptor.value = async function (...args: any[]) {
      let key: string | undefined;
      // Phase 1: best-effort cache lookup. A failure here must not prevent
      // the method from running, so errors are logged and we fall through.
      try {
        key = options.keyGenerator
          ? options.keyGenerator(args, target, propertyName)
          : generateDefaultKey(target.constructor.name, propertyName, args);
        const cached = await cacheProvider.get(key);
        if (cached !== null) {
          logger.debug('Method cache hit', {
            class: target.constructor.name,
            method: propertyName,
            key
          });
          return cached;
        }
      } catch (error) {
        logger.error('Cache decorator error', {
          class: target.constructor.name,
          method: propertyName,
          error: error instanceof Error ? error.message : String(error)
        });
      }
      // Phase 2: run the method exactly once, OUTSIDE any cache try/catch.
      // The original caught method errors too and re-ran the method in the
      // catch block, executing it twice and duplicating side effects.
      const result = await originalMethod.apply(this, args);
      // Phase 3: best-effort store of the result.
      if (key !== undefined) {
        try {
          await cacheProvider.set(key, result, options.ttl);
          logger.debug('Method executed and cached', {
            class: target.constructor.name,
            method: propertyName,
            key
          });
        } catch (error) {
          logger.error('Cache decorator error', {
            class: target.constructor.name,
            method: propertyName,
            error: error instanceof Error ? error.message : String(error)
          });
        }
      }
      return result;
    };
  };
}
/**
 * Cache invalidation decorator.
 *
 * Evicts the configured keys before or after the decorated method runs.
 * Eviction failures are logged and swallowed; errors thrown by the method
 * itself propagate unchanged and the method is executed exactly once.
 */
export function CacheEvict(
  cacheProvider: CacheProvider,
  options: {
    keyGenerator?: (args: any[], target?: any, methodName?: string) => string | string[];
    evictBefore?: boolean; // Evict before method execution
  } = {}
) {
  return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
    const originalMethod = descriptor.value;
    descriptor.value = async function (...args: any[]) {
      // Resolve the keys up front; a failing key generator disables eviction
      // for this call but never blocks the method.
      let keysArray: string[] = [];
      try {
        const keys = options.keyGenerator
          ? options.keyGenerator(args, target, propertyName)
          : generateDefaultKey(target.constructor.name, propertyName, args);
        keysArray = Array.isArray(keys) ? keys : [keys];
      } catch (error) {
        logger.error('Cache evict decorator error', {
          class: target.constructor.name,
          method: propertyName,
          error: error instanceof Error ? error.message : String(error)
        });
      }
      // Best-effort eviction helper shared by both phases.
      const evict = async (phase: 'before' | 'after'): Promise<void> => {
        try {
          for (const key of keysArray) {
            await cacheProvider.del(key);
          }
          logger.debug(`Cache evicted ${phase} method execution`, {
            class: target.constructor.name,
            method: propertyName,
            keys: keysArray
          });
        } catch (error) {
          logger.error('Cache evict decorator error', {
            class: target.constructor.name,
            method: propertyName,
            error: error instanceof Error ? error.message : String(error)
          });
        }
      };
      if (options.evictBefore) {
        await evict('before');
      }
      // Run exactly once, outside any cache try/catch. The original caught a
      // throwing method and re-executed it in the catch block, duplicating
      // side effects on mutating methods.
      const result = await originalMethod.apply(this, args);
      if (!options.evictBefore) {
        await evict('after');
      }
      return result;
    };
  };
}
/**
 * Cache warming decorator - pre-populate cache with method results.
 *
 * On the first invocation the configured warmupArgs are executed in the
 * background (via setImmediate) and their results cached. Normal calls use
 * read-through caching; cache failures are logged and fall back to the
 * method itself, consistent with Cacheable.
 *
 * NOTE(review): `warmed` lives in the decorator closure, so warmup runs once
 * per decorated method (not per instance), and the background warmup uses
 * `this` from whichever instance triggered the first call — confirm that is
 * the intended semantics.
 */
export function CacheWarm(
  cacheProvider: CacheProvider,
  options: {
    keyGenerator?: (args: any[], target?: any, methodName?: string) => string;
    ttl?: number;
    warmupArgs: any[][]; // Array of argument arrays to warm up
  }
) {
  return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
    const originalMethod = descriptor.value;
    // Warmup cache when method is first accessed
    let warmed = false;
    descriptor.value = async function (...args: any[]) {
      // Kick off the one-time background warmup.
      if (!warmed) {
        warmed = true;
        setImmediate(async () => {
          try {
            for (const warmupArgs of options.warmupArgs) {
              const key = options.keyGenerator
                ? options.keyGenerator(warmupArgs, target, propertyName)
                : generateDefaultKey(target.constructor.name, propertyName, warmupArgs);
              // Only compute entries that are not already cached
              const exists = await cacheProvider.exists(key);
              if (!exists) {
                const result = await originalMethod.apply(this, warmupArgs);
                await cacheProvider.set(key, result, options.ttl);
              }
            }
            logger.info('Cache warmed up', {
              class: target.constructor.name,
              method: propertyName,
              count: options.warmupArgs.length
            });
          } catch (error) {
            logger.error('Cache warmup failed', {
              class: target.constructor.name,
              method: propertyName,
              error
            });
          }
        });
      }
      // Read-through path. The original had no error handling here, so a
      // cache outage made the decorated method itself fail; now cache errors
      // are logged and the method still runs (exactly once).
      let key: string | undefined;
      try {
        key = options.keyGenerator
          ? options.keyGenerator(args, target, propertyName)
          : generateDefaultKey(target.constructor.name, propertyName, args);
        const cached = await cacheProvider.get(key);
        if (cached !== null) {
          return cached;
        }
      } catch (error) {
        logger.error('Cache warm decorator error', {
          class: target.constructor.name,
          method: propertyName,
          error: error instanceof Error ? error.message : String(error)
        });
      }
      const result = await originalMethod.apply(this, args);
      if (key !== undefined) {
        try {
          await cacheProvider.set(key, result, options.ttl);
        } catch (error) {
          logger.error('Cache warm decorator error', {
            class: target.constructor.name,
            method: propertyName,
            error: error instanceof Error ? error.message : String(error)
          });
        }
      }
      return result;
    };
  };
}
/**
* Trading-specific decorators
*/
/**
* Cache market data with appropriate TTL
*/
export function CacheMarketData(
cacheProvider: CacheProvider,
ttl: number = 300 // 5 minutes default
) {
return Cacheable(cacheProvider, {
keyGenerator: (args) => {
const [symbol, timeframe, date] = args;
return CacheKeyGenerator.marketData(symbol, timeframe, date);
},
ttl
});
}
/**
* Cache technical indicators
*/
export function CacheIndicator(
cacheProvider: CacheProvider,
ttl: number = 600 // 10 minutes default
) {
return Cacheable(cacheProvider, {
keyGenerator: (args) => {
const [symbol, indicator, period, data] = args;
const dataHash = hashArray(data);
return CacheKeyGenerator.indicator(symbol, indicator, period, dataHash);
},
ttl
});
}
/**
* Cache strategy results
*/
export function CacheStrategy(
cacheProvider: CacheProvider,
ttl: number = 1800 // 30 minutes default
) {
return Cacheable(cacheProvider, {
keyGenerator: (args) => {
const [strategyName, symbol, timeframe] = args;
return CacheKeyGenerator.strategy(strategyName, symbol, timeframe);
},
ttl
});
}
/**
* Helper functions
*/
/**
 * Build the default cache key for a decorated method:
 * method:<ClassName>:<methodName>:<hash-of-args>.
 */
function generateDefaultKey(className: string, methodName: string, args: any[]): string {
  return ['method', className, methodName, hashArray(args)].join(':');
}
/**
 * Deterministic string hash of an argument list: a 32-bit rolling hash
 * (hash * 31 + charCode, expressed via shift/subtract) over the JSON
 * serialization, rendered in base 36.
 */
function hashArray(arr: any[]): string {
  const serialized = JSON.stringify(arr);
  let acc = 0;
  for (let i = 0; i < serialized.length; i += 1) {
    acc = (acc << 5) - acc + serialized.charCodeAt(i);
    acc |= 0; // clamp to a signed 32-bit integer
  }
  return Math.abs(acc).toString(36);
}

View file

@ -1,26 +1,118 @@
import { dragonflyConfig } from '@stock-bot/config';
import { RedisCache } from './providers/redis-cache';
import { MemoryCache } from './providers/memory-cache';
import type { CacheProvider, CacheOptions } from './types';
import { HybridCache } from './providers/hybrid-cache';
import type { CacheProvider, CacheOptions, CacheConfig } from './types';
/**
* Factory for creating cache providers.
* Factory for creating cache providers with smart defaults
*
* @param type 'redis' | 'memory'
* @param type 'redis' | 'memory' | 'hybrid' | 'auto'
* @param options configuration for the cache
*/
export function createCache(
type: 'redis' | 'memory',
type: 'redis' | 'memory' | 'hybrid' | 'auto' = 'auto',
options: CacheOptions = {}
): CacheProvider {
if (type === 'redis') {
return new RedisCache(options);
// Auto-detect best cache type based on environment
if (type === 'auto') {
try {
// Try to use hybrid cache if Redis/Dragonfly is configured
if (dragonflyConfig.DRAGONFLY_HOST) {
type = 'hybrid';
} else {
type = 'memory';
}
} catch {
// Fallback to memory if config is not available
type = 'memory';
}
}
switch (type) {
case 'redis':
return new RedisCache(options);
case 'memory':
return new MemoryCache(options);
case 'hybrid':
return new HybridCache(options);
default:
throw new Error(`Unknown cache type: ${type}`);
}
return new MemoryCache(options);
}
export {
/**
* Create a cache instance with trading-optimized defaults
*/
export function createTradingCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'trading:',
ttl: 3600, // 1 hour default
memoryTTL: 300, // 5 minutes for memory cache
maxMemoryItems: 2000, // More items for trading data
enableMetrics: true,
...options
};
return createCache('auto', defaultOptions);
}
/**
* Create a cache for market data with appropriate settings
*/
export function createMarketDataCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'market:',
ttl: 300, // 5 minutes for market data
memoryTTL: 60, // 1 minute in memory
maxMemoryItems: 5000, // Lots of market data
enableMetrics: true,
...options
};
return createCache('auto', defaultOptions);
}
/**
* Create a cache for indicators with longer TTL
*/
export function createIndicatorCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'indicators:',
ttl: 1800, // 30 minutes for indicators
memoryTTL: 600, // 10 minutes in memory
maxMemoryItems: 1000,
enableMetrics: true,
...options
};
return createCache('auto', defaultOptions);
}
// Export types and classes
export type {
CacheProvider,
CacheOptions,
RedisCache,
MemoryCache
};
CacheConfig,
CacheStats,
CacheKey,
SerializationOptions
} from './types';
export { RedisCache } from './providers/redis-cache';
export { MemoryCache } from './providers/memory-cache';
export { HybridCache } from './providers/hybrid-cache';
export { CacheKeyGenerator } from './key-generator';
export {
Cacheable,
CacheEvict,
CacheWarm,
CacheMarketData,
CacheIndicator,
CacheStrategy
} from './decorators/cacheable';
// Default export for convenience
export default createCache;

73
libs/cache/src/key-generator.ts vendored Normal file
View file

@ -0,0 +1,73 @@
/**
 * Centralized builders for cache keys so every subsystem shares one naming
 * scheme. Symbols are lower-cased so keys are case-insensitive.
 */
export class CacheKeyGenerator {
  /**
   * Generate cache key for market data
   */
  static marketData(symbol: string, timeframe: string, date?: Date): string {
    // 'latest' marks the un-dated (most recent) snapshot; dates use YYYY-MM-DD
    const dateStr = date ? date.toISOString().split('T')[0] : 'latest';
    return `market:${symbol.toLowerCase()}:${timeframe}:${dateStr}`;
  }
  /**
   * Generate cache key for technical indicators
   */
  static indicator(symbol: string, indicator: string, period: number, dataHash: string): string {
    return `indicator:${symbol.toLowerCase()}:${indicator}:${period}:${dataHash}`;
  }
  /**
   * Generate cache key for backtest results
   */
  static backtest(strategyName: string, params: Record<string, any>): string {
    const paramHash = this.hashObject(params);
    return `backtest:${strategyName}:${paramHash}`;
  }
  /**
   * Generate cache key for strategy results
   */
  static strategy(strategyName: string, symbol: string, timeframe: string): string {
    return `strategy:${strategyName}:${symbol.toLowerCase()}:${timeframe}`;
  }
  /**
   * Generate cache key for user sessions
   */
  static userSession(userId: string): string {
    return `session:${userId}`;
  }
  /**
   * Generate cache key for portfolio data
   */
  static portfolio(userId: string, portfolioId: string): string {
    return `portfolio:${userId}:${portfolioId}`;
  }
  /**
   * Generate cache key for real-time prices
   */
  static realtimePrice(symbol: string): string {
    return `price:realtime:${symbol.toLowerCase()}`;
  }
  /**
   * Generate cache key for order book data
   */
  static orderBook(symbol: string, depth: number = 10): string {
    return `orderbook:${symbol.toLowerCase()}:${depth}`;
  }
  /**
   * Stable JSON serialization: object keys are sorted RECURSIVELY so
   * logically-equal params always produce the same string.
   *
   * The previous implementation called JSON.stringify(obj, sortedTopKeys);
   * an array replacer filters property names at EVERY depth, so nested
   * values whose keys were not also top-level keys were silently dropped —
   * different params could collide on the same cache key.
   */
  private static stableStringify(value: unknown): string {
    if (value === null || typeof value !== 'object') {
      // JSON.stringify returns undefined for undefined/functions; map those
      // to a fixed token so the hash stays a plain string.
      return JSON.stringify(value) ?? 'undefined';
    }
    if (Array.isArray(value)) {
      return `[${value.map((item) => CacheKeyGenerator.stableStringify(item)).join(',')}]`;
    }
    const record = value as Record<string, unknown>;
    const parts = Object.keys(record)
      .sort()
      .map((k) => `${JSON.stringify(k)}:${CacheKeyGenerator.stableStringify(record[k])}`);
    return `{${parts.join(',')}}`;
  }
  /**
   * Create a simple hash from object for cache keys
   */
  private static hashObject(obj: Record<string, any>): string {
    const str = CacheKeyGenerator.stableStringify(obj);
    let hash = 0;
    for (let i = 0; i < str.length; i++) {
      const char = str.charCodeAt(i);
      hash = ((hash << 5) - hash) + char;
      hash = hash & hash; // Convert to 32-bit integer
    }
    return Math.abs(hash).toString(36);
  }
}

261
libs/cache/src/providers/hybrid-cache.ts vendored Normal file
View file

@ -0,0 +1,261 @@
import { createLogger } from '@stock-bot/logger';
import { CacheProvider, CacheOptions, CacheStats } from '../types';
import { RedisCache } from './redis-cache';
import { MemoryCache } from './memory-cache';
/**
 * Hybrid cache provider that uses memory as L1 cache and Redis as L2 cache
 * Provides the best of both worlds: fast memory access and persistent Redis storage
 */
export class HybridCache implements CacheProvider {
  private memoryCache: MemoryCache;
  private redisCache: RedisCache;
  private logger = createLogger('hybrid-cache');
  private enableMetrics: boolean;
  // Ceiling (seconds) for entries in the L1 memory layer.
  private memoryTTL: number;
  private startTime = Date.now();
  private stats: CacheStats = {
    hits: 0,
    misses: 0,
    errors: 0,
    hitRate: 0,
    total: 0,
    uptime: 0
  };

  constructor(options: CacheOptions = {}) {
    this.enableMetrics = options.enableMetrics ?? true;
    this.memoryTTL = options.memoryTTL ?? 300; // 5 minutes for memory
    // Create L1 (memory) cache with shorter TTL
    this.memoryCache = new MemoryCache({
      ...options,
      ttl: this.memoryTTL,
      maxMemoryItems: options.maxMemoryItems ?? 1000,
      enableMetrics: false // We'll handle metrics at hybrid level
    });
    // Create L2 (Redis) cache with longer TTL
    this.redisCache = new RedisCache({
      ...options,
      enableMetrics: false // We'll handle metrics at hybrid level
    });
    this.logger.info('Hybrid cache initialized', {
      memoryTTL: this.memoryTTL,
      redisTTL: options.ttl ?? 3600,
      maxMemoryItems: options.maxMemoryItems ?? 1000
    });
  }

  // Fold one lookup outcome into the running counters.
  private updateStats(hit: boolean, error = false): void {
    if (!this.enableMetrics) return;
    if (error) {
      this.stats.errors++;
    } else if (hit) {
      this.stats.hits++;
    } else {
      this.stats.misses++;
    }
    this.stats.total = this.stats.hits + this.stats.misses;
    this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
    this.stats.uptime = Date.now() - this.startTime;
  }

  async get<T>(key: string): Promise<T | null> {
    try {
      // Try L1 cache first (memory)
      const memoryValue = await this.memoryCache.get<T>(key);
      if (memoryValue !== null) {
        this.updateStats(true);
        this.logger.debug('L1 cache hit', { key, hitRate: this.stats.hitRate });
        return memoryValue;
      }
      // Try L2 cache (Redis)
      const redisValue = await this.redisCache.get<T>(key);
      if (redisValue !== null) {
        // Populate L1 cache (using its default TTL) for the next access
        await this.memoryCache.set(key, redisValue);
        this.updateStats(true);
        this.logger.debug('L2 cache hit, populating L1', { key, hitRate: this.stats.hitRate });
        return redisValue;
      }
      // Complete miss
      this.updateStats(false);
      this.logger.debug('Cache miss (both L1 and L2)', { key });
      return null;
    } catch (error) {
      this.updateStats(false, true);
      this.logger.error('Hybrid cache get error', {
        key,
        error: error instanceof Error ? error.message : String(error)
      });
      return null;
    }
  }

  async set<T>(key: string, value: T, ttl?: number): Promise<void> {
    try {
      // Write both layers. The L1 TTL is capped at the configured memoryTTL;
      // the original hard-coded 300s here, silently ignoring options.memoryTTL.
      const memoryPromise = this.memoryCache.set(key, value, Math.min(ttl ?? this.memoryTTL, this.memoryTTL));
      const redisPromise = this.redisCache.set(key, value, ttl);
      await Promise.allSettled([memoryPromise, redisPromise]);
      this.logger.debug('Cache set (both L1 and L2)', { key, ttl });
    } catch (error) {
      this.updateStats(false, true);
      this.logger.error('Hybrid cache set error', {
        key,
        error: error instanceof Error ? error.message : String(error)
      });
    }
  }

  async del(key: string): Promise<void> {
    try {
      // Delete from both caches
      const memoryPromise = this.memoryCache.del(key);
      const redisPromise = this.redisCache.del(key);
      await Promise.allSettled([memoryPromise, redisPromise]);
      this.logger.debug('Cache delete (both L1 and L2)', { key });
    } catch (error) {
      this.updateStats(false, true);
      this.logger.error('Hybrid cache delete error', {
        key,
        error: error instanceof Error ? error.message : String(error)
      });
    }
  }

  async exists(key: string): Promise<boolean> {
    try {
      // Check memory first, then Redis
      const memoryExists = await this.memoryCache.exists(key);
      if (memoryExists) return true;
      return await this.redisCache.exists(key);
    } catch (error) {
      this.updateStats(false, true);
      this.logger.error('Hybrid cache exists error', {
        key,
        error: error instanceof Error ? error.message : String(error)
      });
      return false;
    }
  }

  async clear(): Promise<void> {
    try {
      // Clear both caches
      const memoryPromise = this.memoryCache.clear();
      const redisPromise = this.redisCache.clear();
      await Promise.allSettled([memoryPromise, redisPromise]);
      this.logger.info('Cache cleared (both L1 and L2)');
    } catch (error) {
      this.updateStats(false, true);
      this.logger.error('Hybrid cache clear error', {
        error: error instanceof Error ? error.message : String(error)
      });
    }
  }

  async health(): Promise<boolean> {
    try {
      const memoryHealthy = await this.memoryCache.health();
      const redisHealthy = await this.redisCache.health();
      // Hybrid cache is healthy if at least one cache is working
      const isHealthy = memoryHealthy || redisHealthy;
      this.logger.debug('Hybrid cache health check', {
        memory: memoryHealthy,
        redis: redisHealthy,
        overall: isHealthy
      });
      return isHealthy;
    } catch (error) {
      this.logger.error('Hybrid cache health check failed', { error });
      return false;
    }
  }

  getStats(): CacheStats {
    return {
      ...this.stats,
      uptime: Date.now() - this.startTime
    };
  }

  /**
   * Get detailed stats for both cache layers
   */
  getDetailedStats() {
    return {
      hybrid: this.getStats(),
      memory: this.memoryCache.getStats(),
      redis: this.redisCache.getStats()
    };
  }

  /**
   * Warm up the memory cache with frequently accessed keys from Redis
   */
  async warmupMemoryCache(keys: string[]): Promise<void> {
    this.logger.info('Starting memory cache warmup', { keyCount: keys.length });
    let warmed = 0;
    for (const key of keys) {
      try {
        const value = await this.redisCache.get(key);
        if (value !== null) {
          await this.memoryCache.set(key, value);
          warmed++;
        }
      } catch (error) {
        this.logger.warn('Failed to warm up key', { key, error });
      }
    }
    this.logger.info('Memory cache warmup completed', {
      requested: keys.length,
      warmed
    });
  }

  /**
   * Sync memory cache with Redis for specific keys: Redis is treated as the
   * source of truth (L1 entries are updated or dropped to match).
   */
  async syncCaches(keys: string[]): Promise<void> {
    for (const key of keys) {
      try {
        const redisValue = await this.redisCache.get(key);
        if (redisValue !== null) {
          await this.memoryCache.set(key, redisValue);
        } else {
          await this.memoryCache.del(key);
        }
      } catch (error) {
        this.logger.warn('Failed to sync key', { key, error });
      }
    }
  }

  /**
   * Close connections for both caches. Only Redis holds a connection; the
   * memory layer has nothing to close.
   */
  async disconnect(): Promise<void> {
    await this.redisCache.disconnect();
    this.logger.info('Hybrid cache disconnected');
  }
}

View file

@ -1,48 +1,259 @@
import { CacheProvider } from '../types';
import { createLogger } from '@stock-bot/logger';
import { CacheProvider, CacheOptions, CacheStats } from '../types';
interface CacheEntry<T> {
value: T;
expiry: number;
accessed: number;
}
/**
* Simple in-memory cache provider.
* In-memory cache provider with LRU eviction and comprehensive metrics
*/
export class MemoryCache implements CacheProvider {
private store = new Map<string, any>();
private store = new Map<string, CacheEntry<any>>();
private logger = createLogger('memory-cache');
private defaultTTL: number;
private keyPrefix: string;
private maxItems: number;
private enableMetrics: boolean;
private startTime = Date.now();
private stats: CacheStats = {
hits: 0,
misses: 0,
errors: 0,
hitRate: 0,
total: 0,
uptime: 0
};
constructor(options: { ttl?: number; keyPrefix?: string } = {}) {
this.defaultTTL = options.ttl ?? 3600;
constructor(options: CacheOptions = {}) {
this.defaultTTL = options.ttl ?? 3600; // 1 hour default
this.keyPrefix = options.keyPrefix ?? 'cache:';
this.maxItems = options.maxMemoryItems ?? 1000;
this.enableMetrics = options.enableMetrics ?? true;
this.logger.info('Memory cache initialized', {
maxItems: this.maxItems,
defaultTTL: this.defaultTTL,
enableMetrics: this.enableMetrics
});
// Cleanup expired entries every 5 minutes
setInterval(() => this.cleanup(), 5 * 60 * 1000);
}
private getKey(key: string): string {
return `${this.keyPrefix}${key}`;
}
private updateStats(hit: boolean, error = false): void {
if (!this.enableMetrics) return;
if (error) {
this.stats.errors++;
} else if (hit) {
this.stats.hits++;
} else {
this.stats.misses++;
}
this.stats.total = this.stats.hits + this.stats.misses;
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
this.stats.uptime = Date.now() - this.startTime;
}
private cleanup(): void {
const now = Date.now();
let cleaned = 0;
for (const [key, entry] of this.store.entries()) {
if (entry.expiry < now) {
this.store.delete(key);
cleaned++;
}
}
if (cleaned > 0) {
this.logger.debug('Cleaned expired entries', {
cleaned,
remaining: this.store.size
});
}
}
private evictLRU(): void {
if (this.store.size <= this.maxItems) return;
// Find least recently accessed item
let oldestKey = '';
let oldestAccess = Date.now();
for (const [key, entry] of this.store.entries()) {
if (entry.accessed < oldestAccess) {
oldestAccess = entry.accessed;
oldestKey = key;
}
}
if (oldestKey) {
this.store.delete(oldestKey);
this.logger.debug('Evicted LRU entry', { key: oldestKey });
}
}
async get<T>(key: string): Promise<T | null> {
const fullKey = this.getKey(key);
const entry = this.store.get(fullKey);
if (!entry) return null;
if (entry.expiry < Date.now()) {
this.store.delete(fullKey);
try {
const fullKey = this.getKey(key);
const entry = this.store.get(fullKey);
if (!entry) {
this.updateStats(false);
this.logger.debug('Cache miss', { key });
return null;
}
const now = Date.now();
if (entry.expiry < now) {
this.store.delete(fullKey);
this.updateStats(false);
this.logger.debug('Cache miss (expired)', { key });
return null;
}
// Update access time for LRU
entry.accessed = now;
this.updateStats(true);
this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate });
return entry.value;
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache get error', {
key,
error: error instanceof Error ? error.message : String(error)
});
return null;
}
return entry.value;
}
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
const fullKey = this.getKey(key);
const expiry = Date.now() + 1000 * (ttl ?? this.defaultTTL);
this.store.set(fullKey, { value, expiry });
try {
const fullKey = this.getKey(key);
const now = Date.now();
const expiry = now + 1000 * (ttl ?? this.defaultTTL);
// Evict if necessary
this.evictLRU();
this.store.set(fullKey, {
value,
expiry,
accessed: now
});
this.logger.debug('Cache set', { key, ttl: ttl ?? this.defaultTTL });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache set error', {
key,
error: error instanceof Error ? error.message : String(error)
});
}
}
async del(key: string): Promise<void> {
this.store.delete(this.getKey(key));
try {
const fullKey = this.getKey(key);
const deleted = this.store.delete(fullKey);
this.logger.debug('Cache delete', { key, deleted });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache delete error', {
key,
error: error instanceof Error ? error.message : String(error)
});
}
}
async exists(key: string): Promise<boolean> {
return (await this.get(key)) !== null;
try {
const fullKey = this.getKey(key);
const entry = this.store.get(fullKey);
if (!entry) return false;
// Check if expired
if (entry.expiry < Date.now()) {
this.store.delete(fullKey);
return false;
}
return true;
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache exists error', {
key,
error: error instanceof Error ? error.message : String(error)
});
return false;
}
}
async clear(): Promise<void> {
this.store.clear();
try {
const size = this.store.size;
this.store.clear();
this.logger.info('Cache cleared', { entriesDeleted: size });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache clear error', {
error: error instanceof Error ? error.message : String(error)
});
}
}
async health(): Promise<boolean> {
try {
// Simple health check - try to set and get a test value
await this.set('__health_check__', 'ok', 1);
const result = await this.get('__health_check__');
await this.del('__health_check__');
return result === 'ok';
} catch (error) {
this.logger.error('Memory cache health check failed', { error });
return false;
}
}
getStats(): CacheStats {
return {
...this.stats,
uptime: Date.now() - this.startTime
};
}
/**
* Get additional memory cache specific stats
*/
getMemoryStats() {
return {
...this.getStats(),
entries: this.store.size,
maxItems: this.maxItems,
memoryUsage: this.estimateMemoryUsage()
};
}
private estimateMemoryUsage(): number {
// Rough estimation of memory usage in bytes
let bytes = 0;
for (const [key, entry] of this.store.entries()) {
bytes += key.length * 2; // UTF-16 characters
bytes += JSON.stringify(entry.value).length * 2;
bytes += 24; // Overhead for entry object
}
return bytes;
}
}

View file

@ -1,59 +1,263 @@
import Redis, { RedisOptions } from 'ioredis';
import { CacheProvider, CacheOptions } from '../types';
import Redis from 'ioredis';
import { createLogger } from '@stock-bot/logger';
import { dragonflyConfig } from '@stock-bot/config';
import { CacheProvider, CacheOptions, CacheStats } from '../types';
/**
* Redis-based cache provider implementing CacheProvider interface.
* Redis-based cache provider with comprehensive error handling and metrics
*/
export class RedisCache implements CacheProvider {
private redis: Redis;
private logger = createLogger('redis-cache');
private defaultTTL: number;
private keyPrefix: string;
private enableMetrics: boolean;
private isConnected = false;
private startTime = Date.now();
private stats: CacheStats = {
hits: 0,
misses: 0,
errors: 0,
hitRate: 0,
total: 0,
uptime: 0
};
constructor(options: CacheOptions = {}) {
if (options.redisUrl) {
this.redis = new Redis(options.redisUrl);
} else {
this.redis = new Redis(options.redisOptions as RedisOptions);
}
this.defaultTTL = options.ttl ?? 3600; // default 1 hour
this.defaultTTL = options.ttl ?? 3600; // 1 hour default
this.keyPrefix = options.keyPrefix ?? 'cache:';
this.enableMetrics = options.enableMetrics ?? true;
const redisConfig = {
host: dragonflyConfig.DRAGONFLY_HOST,
port: dragonflyConfig.DRAGONFLY_PORT,
password: dragonflyConfig.DRAGONFLY_PASSWORD || undefined,
username: dragonflyConfig.DRAGONFLY_USERNAME || undefined,
db: dragonflyConfig.DRAGONFLY_DATABASE,
maxRetriesPerRequest: dragonflyConfig.DRAGONFLY_MAX_RETRIES,
retryDelayOnFailover: dragonflyConfig.DRAGONFLY_RETRY_DELAY,
connectTimeout: dragonflyConfig.DRAGONFLY_CONNECT_TIMEOUT,
commandTimeout: dragonflyConfig.DRAGONFLY_COMMAND_TIMEOUT,
keepAlive: dragonflyConfig.DRAGONFLY_ENABLE_KEEPALIVE ? dragonflyConfig.DRAGONFLY_KEEPALIVE_INTERVAL * 1000 : 0,
...(dragonflyConfig.DRAGONFLY_TLS && {
tls: {
cert: dragonflyConfig.DRAGONFLY_TLS_CERT_FILE || undefined,
key: dragonflyConfig.DRAGONFLY_TLS_KEY_FILE || undefined,
ca: dragonflyConfig.DRAGONFLY_TLS_CA_FILE || undefined,
rejectUnauthorized: !dragonflyConfig.DRAGONFLY_TLS_SKIP_VERIFY,
}
})
};
this.redis = new Redis(redisConfig);
this.setupEventHandlers();
}
private setupEventHandlers(): void {
this.redis.on('connect', () => {
this.isConnected = true;
this.logger.info('Redis cache connected', {
host: dragonflyConfig.DRAGONFLY_HOST,
port: dragonflyConfig.DRAGONFLY_PORT,
db: dragonflyConfig.DRAGONFLY_DATABASE
});
});
this.redis.on('ready', () => {
this.logger.info('Redis cache ready for commands');
});
this.redis.on('error', (error) => {
this.isConnected = false;
this.stats.errors++;
this.logger.error('Redis cache connection error', { error: error.message });
});
this.redis.on('close', () => {
this.isConnected = false;
this.logger.warn('Redis cache connection closed');
});
this.redis.on('reconnecting', () => {
this.logger.info('Redis cache reconnecting...');
});
}
private getKey(key: string): string {
return `${this.keyPrefix}${key}`;
}
async get<T>(key: string): Promise<T | null> {
const fullKey = this.getKey(key);
const val = await this.redis.get(fullKey);
if (val === null) return null;
private updateStats(hit: boolean, error = false): void {
if (!this.enableMetrics) return;
if (error) {
this.stats.errors++;
} else if (hit) {
this.stats.hits++;
} else {
this.stats.misses++;
}
this.stats.total = this.stats.hits + this.stats.misses;
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
this.stats.uptime = Date.now() - this.startTime;
}
private async safeExecute<T>(
operation: () => Promise<T>,
fallback: T,
operationName: string
): Promise<T> {
if (!this.isConnected) {
this.logger.warn(`Redis not connected for ${operationName}, using fallback`);
this.updateStats(false, true);
return fallback;
}
try {
return JSON.parse(val) as T;
} catch {
return (val as unknown) as T;
return await operation();
} catch (error) {
this.logger.error(`Redis ${operationName} failed`, {
error: error instanceof Error ? error.message : String(error)
});
this.updateStats(false, true);
return fallback;
}
}
async get<T>(key: string): Promise<T | null> {
return this.safeExecute(
async () => {
const fullKey = this.getKey(key);
const value = await this.redis.get(fullKey);
if (value === null) {
this.updateStats(false);
this.logger.debug('Cache miss', { key });
return null;
}
this.updateStats(true);
this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate });
try {
return JSON.parse(value) as T;
} catch {
// Return as-is if not valid JSON
return value as unknown as T;
}
},
null,
'get'
);
}
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
const fullKey = this.getKey(key);
const str = typeof value === 'string' ? (value as unknown as string) : JSON.stringify(value);
const expiry = ttl ?? this.defaultTTL;
await this.redis.set(fullKey, str, 'EX', expiry);
await this.safeExecute(
async () => {
const fullKey = this.getKey(key);
const serialized = typeof value === 'string' ? value : JSON.stringify(value);
const expiry = ttl ?? this.defaultTTL;
await this.redis.setex(fullKey, expiry, serialized);
this.logger.debug('Cache set', { key, ttl: expiry });
},
undefined,
'set'
);
}
async del(key: string): Promise<void> {
await this.redis.del(this.getKey(key));
await this.safeExecute(
async () => {
const fullKey = this.getKey(key);
await this.redis.del(fullKey);
this.logger.debug('Cache delete', { key });
},
undefined,
'del'
);
}
async exists(key: string): Promise<boolean> {
const exists = await this.redis.exists(this.getKey(key));
return exists === 1;
return this.safeExecute(
async () => {
const fullKey = this.getKey(key);
const result = await this.redis.exists(fullKey);
return result === 1;
},
false,
'exists'
);
}
async clear(): Promise<void> {
const pattern = `${this.keyPrefix}*`;
const keys = await this.redis.keys(pattern);
if (keys.length) await this.redis.del(...keys);
await this.safeExecute(
async () => {
const pattern = `${this.keyPrefix}*`;
const keys = await this.redis.keys(pattern);
if (keys.length > 0) {
await this.redis.del(...keys);
this.logger.info('Cache cleared', { keysDeleted: keys.length });
}
},
undefined,
'clear'
);
}
async health(): Promise<boolean> {
try {
const pong = await this.redis.ping();
return pong === 'PONG' && this.isConnected;
} catch (error) {
this.logger.error('Redis health check failed', { error });
return false;
}
}
getStats(): CacheStats {
return {
...this.stats,
uptime: Date.now() - this.startTime
};
}
/**
* Trading-specific convenience methods
*/
async cacheMarketData(symbol: string, timeframe: string, data: any[], ttl = 300): Promise<void> {
const key = `market:${symbol}:${timeframe}`;
await this.set(key, data, ttl);
}
async getMarketData<T>(symbol: string, timeframe: string): Promise<T | null> {
const key = `market:${symbol}:${timeframe}`;
return this.get<T>(key);
}
async cacheIndicator(
symbol: string,
indicator: string,
period: number,
data: number[],
ttl = 600
): Promise<void> {
const key = `indicator:${symbol}:${indicator}:${period}`;
await this.set(key, data, ttl);
}
async getIndicator(symbol: string, indicator: string, period: number): Promise<number[] | null> {
const key = `indicator:${symbol}:${indicator}:${period}`;
return this.get<number[]>(key);
}
/**
* Close the Redis connection
*/
async disconnect(): Promise<void> {
await this.redis.quit();
this.logger.info('Redis cache disconnected');
}
}

View file

@ -1,34 +1,42 @@
import type { RedisOptions as IORedisOptions } from 'ioredis';
/**
 * Interface for a generic cache provider.
 */
export interface CacheProvider {
// Returns the cached value, or null on a miss.
get<T>(key: string): Promise<T | null>;
// Stores a value; ttl is in seconds (provider default applies when omitted).
set<T>(key: string, value: T, ttl?: number): Promise<void>;
del(key: string): Promise<void>;
exists(key: string): Promise<boolean>;
clear(): Promise<void>;
// Synchronous in-process snapshot of hit/miss/error counters.
getStats(): CacheStats;
// True when the backing store is reachable/usable.
health(): Promise<boolean>;
}
/**
 * Options for configuring the cache provider.
 */
export interface CacheOptions {
/**
 * Full Redis connection string (e.g., redis://localhost:6379)
 */
redisUrl?: string;
/**
 * Raw ioredis connection options if not using a URL.
 */
redisOptions?: IORedisOptions;
/**
 * Default time-to-live for cache entries (in seconds).
 */
ttl?: number;
/**
 * Prefix to use for all cache keys.
 */
keyPrefix?: string;
// Whether providers should track hit/miss/error counters (default varies by provider).
enableMetrics?: boolean;
// Maximum entry count for in-memory providers before eviction kicks in.
maxMemoryItems?: number;
// TTL (seconds) for the memory (L1) layer of a hybrid cache.
memoryTTL?: number;
}
/**
 * Runtime metrics reported by a cache provider.
 */
export interface CacheStats {
hits: number;
misses: number;
errors: number;
// hits / (hits + misses); 0 while no lookups have happened.
hitRate: number;
// hits + misses (errors are counted separately).
total: number;
// Milliseconds since the provider was constructed.
uptime: number;
}
/**
 * Declarative cache configuration (e.g. loaded from application config).
 */
export interface CacheConfig {
type: 'redis' | 'memory' | 'hybrid';
keyPrefix?: string;
defaultTTL?: number;
maxMemoryItems?: number;
enableMetrics?: boolean;
compression?: boolean;
}
// A literal key or a lazy key factory.
export type CacheKey = string | (() => string);
/**
 * Options controlling how values are serialized before storage.
 */
export interface SerializationOptions {
compress?: boolean;
binary?: boolean;
}