linxus fs fixes

This commit is contained in:
Boki 2025-06-09 22:55:51 -04:00
parent ac23b70146
commit 0b7846fe67
292 changed files with 41947 additions and 41947 deletions

View file

@ -1,32 +1,32 @@
{
"name": "@stock-bot/cache",
"version": "1.0.0",
"description": "Caching library for Redis and in-memory providers",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"clean": "rimraf dist",
"test": "bun test"
},
"dependencies": {
"ioredis": "^5.3.2"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"bun-types": "^1.2.15"
},
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}
{
"name": "@stock-bot/cache",
"version": "1.0.0",
"description": "Caching library for Redis and in-memory providers",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"clean": "rimraf dist",
"test": "bun test"
},
"dependencies": {
"ioredis": "^5.3.2"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"bun-types": "^1.2.15"
},
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}

View file

@ -1,265 +1,265 @@
import { getLogger } from '@stock-bot/logger';
import { CacheProvider } from '../types';
import { CacheKeyGenerator } from '../key-generator';
const logger = getLogger('cache-decorator');
/**
* Method decorator for automatic caching
*/
export function Cacheable(
cacheProvider: CacheProvider,
options: {
keyGenerator?: (args: any[], target?: any, methodName?: string) => string;
ttl?: number;
skipFirstArg?: boolean; // Skip 'this' if it's the first argument
} = {}
) {
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
const originalMethod = descriptor.value;
descriptor.value = async function (...args: any[]) {
try {
// Generate cache key
const key = options.keyGenerator
? options.keyGenerator(args, target, propertyName)
: generateDefaultKey(target.constructor.name, propertyName, args);
// Try to get from cache
const cached = await cacheProvider.get(key);
if (cached !== null) {
logger.debug('Method cache hit', {
class: target.constructor.name,
method: propertyName,
key
});
return cached;
}
// Execute method and cache result
const result = await originalMethod.apply(this, args);
await cacheProvider.set(key, result, options.ttl);
logger.debug('Method executed and cached', {
class: target.constructor.name,
method: propertyName,
key
});
return result;
} catch (error) {
logger.error('Cache decorator error', {
class: target.constructor.name,
method: propertyName,
error: error instanceof Error ? error.message : String(error)
});
// Fallback to original method if caching fails
return await originalMethod.apply(this, args);
}
};
};
}
/**
* Cache invalidation decorator
*/
export function CacheEvict(
cacheProvider: CacheProvider,
options: {
keyGenerator?: (args: any[], target?: any, methodName?: string) => string | string[];
evictBefore?: boolean; // Evict before method execution
} = {}
) {
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
const originalMethod = descriptor.value;
descriptor.value = async function (...args: any[]) {
try {
const keys = options.keyGenerator
? options.keyGenerator(args, target, propertyName)
: generateDefaultKey(target.constructor.name, propertyName, args);
const keysArray = Array.isArray(keys) ? keys : [keys];
if (options.evictBefore) {
// Evict before method execution
for (const key of keysArray) {
await cacheProvider.del(key);
}
logger.debug('Cache evicted before method execution', {
class: target.constructor.name,
method: propertyName,
keys: keysArray
});
}
// Execute method
const result = await originalMethod.apply(this, args);
if (!options.evictBefore) {
// Evict after method execution
for (const key of keysArray) {
await cacheProvider.del(key);
}
logger.debug('Cache evicted after method execution', {
class: target.constructor.name,
method: propertyName,
keys: keysArray
});
}
return result;
} catch (error) {
logger.error('Cache evict decorator error', {
class: target.constructor.name,
method: propertyName,
error: error instanceof Error ? error.message : String(error)
});
// Continue with original method execution even if eviction fails
return await originalMethod.apply(this, args);
}
};
};
}
/**
* Cache warming decorator - pre-populate cache with method results
*/
export function CacheWarm(
cacheProvider: CacheProvider,
options: {
keyGenerator?: (args: any[], target?: any, methodName?: string) => string;
ttl?: number;
warmupArgs: any[][]; // Array of argument arrays to warm up
}
) {
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
const originalMethod = descriptor.value;
// Warmup cache when method is first accessed
let warmed = false;
descriptor.value = async function (...args: any[]) {
// Perform warmup if not done yet
if (!warmed) {
warmed = true;
setImmediate(async () => {
try {
for (const warmupArgs of options.warmupArgs) {
const key = options.keyGenerator
? options.keyGenerator(warmupArgs, target, propertyName)
: generateDefaultKey(target.constructor.name, propertyName, warmupArgs);
// Check if already cached
const exists = await cacheProvider.exists(key);
if (!exists) {
const result = await originalMethod.apply(this, warmupArgs);
await cacheProvider.set(key, result, options.ttl);
}
}
logger.info('Cache warmed up', {
class: target.constructor.name,
method: propertyName,
count: options.warmupArgs.length
});
} catch (error) {
logger.error('Cache warmup failed', {
class: target.constructor.name,
method: propertyName,
error
});
}
});
}
// Execute normal cacheable logic
const key = options.keyGenerator
? options.keyGenerator(args, target, propertyName)
: generateDefaultKey(target.constructor.name, propertyName, args);
const cached = await cacheProvider.get(key);
if (cached !== null) {
return cached;
}
const result = await originalMethod.apply(this, args);
await cacheProvider.set(key, result, options.ttl);
return result;
};
};
}
/**
* Trading-specific decorators
*/
/**
* Cache market data with appropriate TTL
*/
export function CacheMarketData(
cacheProvider: CacheProvider,
ttl: number = 300 // 5 minutes default
) {
return Cacheable(cacheProvider, {
keyGenerator: (args) => {
const [symbol, timeframe, date] = args;
return CacheKeyGenerator.marketData(symbol, timeframe, date);
},
ttl
});
}
/**
* Cache technical indicators
*/
export function CacheIndicator(
cacheProvider: CacheProvider,
ttl: number = 600 // 10 minutes default
) {
return Cacheable(cacheProvider, {
keyGenerator: (args) => {
const [symbol, indicator, period, data] = args;
const dataHash = hashArray(data);
return CacheKeyGenerator.indicator(symbol, indicator, period, dataHash);
},
ttl
});
}
/**
* Cache strategy results
*/
export function CacheStrategy(
cacheProvider: CacheProvider,
ttl: number = 1800 // 30 minutes default
) {
return Cacheable(cacheProvider, {
keyGenerator: (args) => {
const [strategyName, symbol, timeframe] = args;
return CacheKeyGenerator.strategy(strategyName, symbol, timeframe);
},
ttl
});
}
/**
* Helper functions
*/
function generateDefaultKey(className: string, methodName: string, args: any[]): string {
const argsHash = hashArray(args);
return `method:${className}:${methodName}:${argsHash}`;
}
function hashArray(arr: any[]): string {
const str = JSON.stringify(arr);
let hash = 0;
for (let i = 0; i < str.length; i++) {
const char = str.charCodeAt(i);
hash = ((hash << 5) - hash) + char;
hash = hash & hash; // Convert to 32-bit integer
}
return Math.abs(hash).toString(36);
}
import { getLogger } from '@stock-bot/logger';
import { CacheProvider } from '../types';
import { CacheKeyGenerator } from '../key-generator';
const logger = getLogger('cache-decorator');
/**
* Method decorator for automatic caching
*/
export function Cacheable(
cacheProvider: CacheProvider,
options: {
keyGenerator?: (args: any[], target?: any, methodName?: string) => string;
ttl?: number;
skipFirstArg?: boolean; // Skip 'this' if it's the first argument
} = {}
) {
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
const originalMethod = descriptor.value;
descriptor.value = async function (...args: any[]) {
try {
// Generate cache key
const key = options.keyGenerator
? options.keyGenerator(args, target, propertyName)
: generateDefaultKey(target.constructor.name, propertyName, args);
// Try to get from cache
const cached = await cacheProvider.get(key);
if (cached !== null) {
logger.debug('Method cache hit', {
class: target.constructor.name,
method: propertyName,
key
});
return cached;
}
// Execute method and cache result
const result = await originalMethod.apply(this, args);
await cacheProvider.set(key, result, options.ttl);
logger.debug('Method executed and cached', {
class: target.constructor.name,
method: propertyName,
key
});
return result;
} catch (error) {
logger.error('Cache decorator error', {
class: target.constructor.name,
method: propertyName,
error: error instanceof Error ? error.message : String(error)
});
// Fallback to original method if caching fails
return await originalMethod.apply(this, args);
}
};
};
}
/**
* Cache invalidation decorator
*/
export function CacheEvict(
cacheProvider: CacheProvider,
options: {
keyGenerator?: (args: any[], target?: any, methodName?: string) => string | string[];
evictBefore?: boolean; // Evict before method execution
} = {}
) {
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
const originalMethod = descriptor.value;
descriptor.value = async function (...args: any[]) {
try {
const keys = options.keyGenerator
? options.keyGenerator(args, target, propertyName)
: generateDefaultKey(target.constructor.name, propertyName, args);
const keysArray = Array.isArray(keys) ? keys : [keys];
if (options.evictBefore) {
// Evict before method execution
for (const key of keysArray) {
await cacheProvider.del(key);
}
logger.debug('Cache evicted before method execution', {
class: target.constructor.name,
method: propertyName,
keys: keysArray
});
}
// Execute method
const result = await originalMethod.apply(this, args);
if (!options.evictBefore) {
// Evict after method execution
for (const key of keysArray) {
await cacheProvider.del(key);
}
logger.debug('Cache evicted after method execution', {
class: target.constructor.name,
method: propertyName,
keys: keysArray
});
}
return result;
} catch (error) {
logger.error('Cache evict decorator error', {
class: target.constructor.name,
method: propertyName,
error: error instanceof Error ? error.message : String(error)
});
// Continue with original method execution even if eviction fails
return await originalMethod.apply(this, args);
}
};
};
}
/**
* Cache warming decorator - pre-populate cache with method results
*/
export function CacheWarm(
cacheProvider: CacheProvider,
options: {
keyGenerator?: (args: any[], target?: any, methodName?: string) => string;
ttl?: number;
warmupArgs: any[][]; // Array of argument arrays to warm up
}
) {
return function (target: any, propertyName: string, descriptor: PropertyDescriptor) {
const originalMethod = descriptor.value;
// Warmup cache when method is first accessed
let warmed = false;
descriptor.value = async function (...args: any[]) {
// Perform warmup if not done yet
if (!warmed) {
warmed = true;
setImmediate(async () => {
try {
for (const warmupArgs of options.warmupArgs) {
const key = options.keyGenerator
? options.keyGenerator(warmupArgs, target, propertyName)
: generateDefaultKey(target.constructor.name, propertyName, warmupArgs);
// Check if already cached
const exists = await cacheProvider.exists(key);
if (!exists) {
const result = await originalMethod.apply(this, warmupArgs);
await cacheProvider.set(key, result, options.ttl);
}
}
logger.info('Cache warmed up', {
class: target.constructor.name,
method: propertyName,
count: options.warmupArgs.length
});
} catch (error) {
logger.error('Cache warmup failed', {
class: target.constructor.name,
method: propertyName,
error
});
}
});
}
// Execute normal cacheable logic
const key = options.keyGenerator
? options.keyGenerator(args, target, propertyName)
: generateDefaultKey(target.constructor.name, propertyName, args);
const cached = await cacheProvider.get(key);
if (cached !== null) {
return cached;
}
const result = await originalMethod.apply(this, args);
await cacheProvider.set(key, result, options.ttl);
return result;
};
};
}
/**
* Trading-specific decorators
*/
/**
* Cache market data with appropriate TTL
*/
export function CacheMarketData(
cacheProvider: CacheProvider,
ttl: number = 300 // 5 minutes default
) {
return Cacheable(cacheProvider, {
keyGenerator: (args) => {
const [symbol, timeframe, date] = args;
return CacheKeyGenerator.marketData(symbol, timeframe, date);
},
ttl
});
}
/**
* Cache technical indicators
*/
export function CacheIndicator(
cacheProvider: CacheProvider,
ttl: number = 600 // 10 minutes default
) {
return Cacheable(cacheProvider, {
keyGenerator: (args) => {
const [symbol, indicator, period, data] = args;
const dataHash = hashArray(data);
return CacheKeyGenerator.indicator(symbol, indicator, period, dataHash);
},
ttl
});
}
/**
* Cache strategy results
*/
export function CacheStrategy(
cacheProvider: CacheProvider,
ttl: number = 1800 // 30 minutes default
) {
return Cacheable(cacheProvider, {
keyGenerator: (args) => {
const [strategyName, symbol, timeframe] = args;
return CacheKeyGenerator.strategy(strategyName, symbol, timeframe);
},
ttl
});
}
/**
* Helper functions
*/
function generateDefaultKey(className: string, methodName: string, args: any[]): string {
const argsHash = hashArray(args);
return `method:${className}:${methodName}:${argsHash}`;
}
function hashArray(arr: any[]): string {
const str = JSON.stringify(arr);
let hash = 0;
for (let i = 0; i < str.length; i++) {
const char = str.charCodeAt(i);
hash = ((hash << 5) - hash) + char;
hash = hash & hash; // Convert to 32-bit integer
}
return Math.abs(hash).toString(36);
}

View file

@ -1,118 +1,118 @@
import { dragonflyConfig } from '@stock-bot/config';
import { RedisCache } from './providers/redis-cache';
import { MemoryCache } from './providers/memory-cache';
import { HybridCache } from './providers/hybrid-cache';
import type { CacheProvider, CacheOptions, CacheConfig } from './types';
/**
* Factory for creating cache providers with smart defaults
*
* @param type 'redis' | 'memory' | 'hybrid' | 'auto'
* @param options configuration for the cache
*/
export function createCache(
type: 'redis' | 'memory' | 'hybrid' | 'auto' = 'auto',
options: CacheOptions = {}
): CacheProvider {
// Auto-detect best cache type based on environment
if (type === 'auto') {
try {
// Try to use hybrid cache if Redis/Dragonfly is configured
if (dragonflyConfig.DRAGONFLY_HOST) {
type = 'hybrid';
} else {
type = 'memory';
}
} catch {
// Fallback to memory if config is not available
type = 'memory';
}
}
switch (type) {
case 'redis':
return new RedisCache(options);
case 'memory':
return new MemoryCache(options);
case 'hybrid':
return new HybridCache(options);
default:
throw new Error(`Unknown cache type: ${type}`);
}
}
/**
* Create a cache instance with trading-optimized defaults
*/
export function createTradingCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'trading:',
ttl: 3600, // 1 hour default
memoryTTL: 300, // 5 minutes for memory cache
maxMemoryItems: 2000, // More items for trading data
enableMetrics: true,
...options
};
return createCache('auto', defaultOptions);
}
/**
* Create a cache for market data with appropriate settings
*/
export function createMarketDataCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'market:',
ttl: 300, // 5 minutes for market data
memoryTTL: 60, // 1 minute in memory
maxMemoryItems: 5000, // Lots of market data
enableMetrics: true,
...options
};
return createCache('auto', defaultOptions);
}
/**
* Create a cache for indicators with longer TTL
*/
export function createIndicatorCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'indicators:',
ttl: 1800, // 30 minutes for indicators
memoryTTL: 600, // 10 minutes in memory
maxMemoryItems: 1000,
enableMetrics: true,
...options
};
return createCache('auto', defaultOptions);
}
// Export types and classes
export type {
CacheProvider,
CacheOptions,
CacheConfig,
CacheStats,
CacheKey,
SerializationOptions
} from './types';
export { RedisCache } from './providers/redis-cache';
export { MemoryCache } from './providers/memory-cache';
export { HybridCache } from './providers/hybrid-cache';
export { CacheKeyGenerator } from './key-generator';
export {
Cacheable,
CacheEvict,
CacheWarm,
CacheMarketData,
CacheIndicator,
CacheStrategy
} from './decorators/cacheable';
// Default export for convenience
import { dragonflyConfig } from '@stock-bot/config';
import { RedisCache } from './providers/redis-cache';
import { MemoryCache } from './providers/memory-cache';
import { HybridCache } from './providers/hybrid-cache';
import type { CacheProvider, CacheOptions, CacheConfig } from './types';
/**
* Factory for creating cache providers with smart defaults
*
* @param type 'redis' | 'memory' | 'hybrid' | 'auto'
* @param options configuration for the cache
*/
export function createCache(
type: 'redis' | 'memory' | 'hybrid' | 'auto' = 'auto',
options: CacheOptions = {}
): CacheProvider {
// Auto-detect best cache type based on environment
if (type === 'auto') {
try {
// Try to use hybrid cache if Redis/Dragonfly is configured
if (dragonflyConfig.DRAGONFLY_HOST) {
type = 'hybrid';
} else {
type = 'memory';
}
} catch {
// Fallback to memory if config is not available
type = 'memory';
}
}
switch (type) {
case 'redis':
return new RedisCache(options);
case 'memory':
return new MemoryCache(options);
case 'hybrid':
return new HybridCache(options);
default:
throw new Error(`Unknown cache type: ${type}`);
}
}
/**
* Create a cache instance with trading-optimized defaults
*/
export function createTradingCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'trading:',
ttl: 3600, // 1 hour default
memoryTTL: 300, // 5 minutes for memory cache
maxMemoryItems: 2000, // More items for trading data
enableMetrics: true,
...options
};
return createCache('auto', defaultOptions);
}
/**
* Create a cache for market data with appropriate settings
*/
export function createMarketDataCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'market:',
ttl: 300, // 5 minutes for market data
memoryTTL: 60, // 1 minute in memory
maxMemoryItems: 5000, // Lots of market data
enableMetrics: true,
...options
};
return createCache('auto', defaultOptions);
}
/**
* Create a cache for indicators with longer TTL
*/
export function createIndicatorCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'indicators:',
ttl: 1800, // 30 minutes for indicators
memoryTTL: 600, // 10 minutes in memory
maxMemoryItems: 1000,
enableMetrics: true,
...options
};
return createCache('auto', defaultOptions);
}
// Export types and classes
export type {
CacheProvider,
CacheOptions,
CacheConfig,
CacheStats,
CacheKey,
SerializationOptions
} from './types';
export { RedisCache } from './providers/redis-cache';
export { MemoryCache } from './providers/memory-cache';
export { HybridCache } from './providers/hybrid-cache';
export { CacheKeyGenerator } from './key-generator';
export {
Cacheable,
CacheEvict,
CacheWarm,
CacheMarketData,
CacheIndicator,
CacheStrategy
} from './decorators/cacheable';
// Default export for convenience
export default createCache;

View file

@ -1,73 +1,73 @@
export class CacheKeyGenerator {
/**
* Generate cache key for market data
*/
static marketData(symbol: string, timeframe: string, date?: Date): string {
const dateStr = date ? date.toISOString().split('T')[0] : 'latest';
return `market:${symbol.toLowerCase()}:${timeframe}:${dateStr}`;
}
/**
* Generate cache key for technical indicators
*/
static indicator(symbol: string, indicator: string, period: number, dataHash: string): string {
return `indicator:${symbol.toLowerCase()}:${indicator}:${period}:${dataHash}`;
}
/**
* Generate cache key for backtest results
*/
static backtest(strategyName: string, params: Record<string, any>): string {
const paramHash = this.hashObject(params);
return `backtest:${strategyName}:${paramHash}`;
}
/**
* Generate cache key for strategy results
*/
static strategy(strategyName: string, symbol: string, timeframe: string): string {
return `strategy:${strategyName}:${symbol.toLowerCase()}:${timeframe}`;
}
/**
* Generate cache key for user sessions
*/
static userSession(userId: string): string {
return `session:${userId}`;
}
/**
* Generate cache key for portfolio data
*/
static portfolio(userId: string, portfolioId: string): string {
return `portfolio:${userId}:${portfolioId}`;
}
/**
* Generate cache key for real-time prices
*/
static realtimePrice(symbol: string): string {
return `price:realtime:${symbol.toLowerCase()}`;
}
/**
* Generate cache key for order book data
*/
static orderBook(symbol: string, depth: number = 10): string {
return `orderbook:${symbol.toLowerCase()}:${depth}`;
}
/**
* Create a simple hash from object for cache keys
*/
private static hashObject(obj: Record<string, any>): string {
const str = JSON.stringify(obj, Object.keys(obj).sort());
let hash = 0;
for (let i = 0; i < str.length; i++) {
const char = str.charCodeAt(i);
hash = ((hash << 5) - hash) + char;
hash = hash & hash; // Convert to 32-bit integer
}
return Math.abs(hash).toString(36);
}
}
export class CacheKeyGenerator {
/**
* Generate cache key for market data
*/
static marketData(symbol: string, timeframe: string, date?: Date): string {
const dateStr = date ? date.toISOString().split('T')[0] : 'latest';
return `market:${symbol.toLowerCase()}:${timeframe}:${dateStr}`;
}
/**
* Generate cache key for technical indicators
*/
static indicator(symbol: string, indicator: string, period: number, dataHash: string): string {
return `indicator:${symbol.toLowerCase()}:${indicator}:${period}:${dataHash}`;
}
/**
* Generate cache key for backtest results
*/
static backtest(strategyName: string, params: Record<string, any>): string {
const paramHash = this.hashObject(params);
return `backtest:${strategyName}:${paramHash}`;
}
/**
* Generate cache key for strategy results
*/
static strategy(strategyName: string, symbol: string, timeframe: string): string {
return `strategy:${strategyName}:${symbol.toLowerCase()}:${timeframe}`;
}
/**
* Generate cache key for user sessions
*/
static userSession(userId: string): string {
return `session:${userId}`;
}
/**
* Generate cache key for portfolio data
*/
static portfolio(userId: string, portfolioId: string): string {
return `portfolio:${userId}:${portfolioId}`;
}
/**
* Generate cache key for real-time prices
*/
static realtimePrice(symbol: string): string {
return `price:realtime:${symbol.toLowerCase()}`;
}
/**
* Generate cache key for order book data
*/
static orderBook(symbol: string, depth: number = 10): string {
return `orderbook:${symbol.toLowerCase()}:${depth}`;
}
/**
* Create a simple hash from object for cache keys
*/
private static hashObject(obj: Record<string, any>): string {
const str = JSON.stringify(obj, Object.keys(obj).sort());
let hash = 0;
for (let i = 0; i < str.length; i++) {
const char = str.charCodeAt(i);
hash = ((hash << 5) - hash) + char;
hash = hash & hash; // Convert to 32-bit integer
}
return Math.abs(hash).toString(36);
}
}

View file

@ -1,261 +1,261 @@
import { getLogger } from '@stock-bot/logger';
import { CacheProvider, CacheOptions, CacheStats } from '../types';
import { RedisCache } from './redis-cache';
import { MemoryCache } from './memory-cache';
/**
* Hybrid cache provider that uses memory as L1 cache and Redis as L2 cache
* Provides the best of both worlds: fast memory access and persistent Redis storage
*/
export class HybridCache implements CacheProvider {
private memoryCache: MemoryCache;
private redisCache: RedisCache;
private logger = getLogger('hybrid-cache');
private enableMetrics: boolean;
private startTime = Date.now();
private stats: CacheStats = {
hits: 0,
misses: 0,
errors: 0,
hitRate: 0,
total: 0,
uptime: 0
};
constructor(options: CacheOptions = {}) {
this.enableMetrics = options.enableMetrics ?? true;
// Create L1 (memory) cache with shorter TTL
this.memoryCache = new MemoryCache({
...options,
ttl: options.memoryTTL ?? 300, // 5 minutes for memory
maxMemoryItems: options.maxMemoryItems ?? 1000,
enableMetrics: false // We'll handle metrics at hybrid level
});
// Create L2 (Redis) cache with longer TTL
this.redisCache = new RedisCache({
...options,
enableMetrics: false // We'll handle metrics at hybrid level
});
this.logger.info('Hybrid cache initialized', {
memoryTTL: options.memoryTTL ?? 300,
redisTTL: options.ttl ?? 3600,
maxMemoryItems: options.maxMemoryItems ?? 1000
});
}
private updateStats(hit: boolean, error = false): void {
if (!this.enableMetrics) return;
if (error) {
this.stats.errors++;
} else if (hit) {
this.stats.hits++;
} else {
this.stats.misses++;
}
this.stats.total = this.stats.hits + this.stats.misses;
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
this.stats.uptime = Date.now() - this.startTime;
}
async get<T>(key: string): Promise<T | null> {
try {
// Try L1 cache first (memory)
const memoryValue = await this.memoryCache.get<T>(key);
if (memoryValue !== null) {
this.updateStats(true);
this.logger.debug('L1 cache hit', { key, hitRate: this.stats.hitRate });
return memoryValue;
}
// Try L2 cache (Redis)
const redisValue = await this.redisCache.get<T>(key);
if (redisValue !== null) {
// Populate L1 cache for next access
await this.memoryCache.set(key, redisValue);
this.updateStats(true);
this.logger.debug('L2 cache hit, populating L1', { key, hitRate: this.stats.hitRate });
return redisValue;
}
// Complete miss
this.updateStats(false);
this.logger.debug('Cache miss (both L1 and L2)', { key });
return null;
} catch (error) {
this.updateStats(false, true);
this.logger.error('Hybrid cache get error', {
key,
error: error instanceof Error ? error.message : String(error)
});
return null;
}
}
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
try {
// Set in both caches
const memoryPromise = this.memoryCache.set(key, value, Math.min(ttl ?? 300, 300));
const redisPromise = this.redisCache.set(key, value, ttl);
await Promise.allSettled([memoryPromise, redisPromise]);
this.logger.debug('Cache set (both L1 and L2)', { key, ttl });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Hybrid cache set error', {
key,
error: error instanceof Error ? error.message : String(error)
});
}
}
async del(key: string): Promise<void> {
try {
// Delete from both caches
const memoryPromise = this.memoryCache.del(key);
const redisPromise = this.redisCache.del(key);
await Promise.allSettled([memoryPromise, redisPromise]);
this.logger.debug('Cache delete (both L1 and L2)', { key });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Hybrid cache delete error', {
key,
error: error instanceof Error ? error.message : String(error)
});
}
}
async exists(key: string): Promise<boolean> {
try {
// Check memory first, then Redis
const memoryExists = await this.memoryCache.exists(key);
if (memoryExists) return true;
return await this.redisCache.exists(key);
} catch (error) {
this.updateStats(false, true);
this.logger.error('Hybrid cache exists error', {
key,
error: error instanceof Error ? error.message : String(error)
});
return false;
}
}
async clear(): Promise<void> {
try {
// Clear both caches
const memoryPromise = this.memoryCache.clear();
const redisPromise = this.redisCache.clear();
await Promise.allSettled([memoryPromise, redisPromise]);
this.logger.info('Cache cleared (both L1 and L2)');
} catch (error) {
this.updateStats(false, true);
this.logger.error('Hybrid cache clear error', {
error: error instanceof Error ? error.message : String(error)
});
}
}
async health(): Promise<boolean> {
try {
const memoryHealthy = await this.memoryCache.health();
const redisHealthy = await this.redisCache.health();
// Hybrid cache is healthy if at least one cache is working
const isHealthy = memoryHealthy || redisHealthy;
this.logger.debug('Hybrid cache health check', {
memory: memoryHealthy,
redis: redisHealthy,
overall: isHealthy
});
return isHealthy;
} catch (error) {
this.logger.error('Hybrid cache health check failed', error);
return false;
}
}
getStats(): CacheStats {
return {
...this.stats,
uptime: Date.now() - this.startTime
};
}
/**
* Get detailed stats for both cache layers
*/
getDetailedStats() {
return {
hybrid: this.getStats(),
memory: this.memoryCache.getStats(),
redis: this.redisCache.getStats()
};
}
/**
* Warm up the memory cache with frequently accessed keys from Redis
*/
async warmupMemoryCache(keys: string[]): Promise<void> {
this.logger.info('Starting memory cache warmup', { keyCount: keys.length });
let warmed = 0;
for (const key of keys) {
try {
const value = await this.redisCache.get(key);
if (value !== null) {
await this.memoryCache.set(key, value);
warmed++;
}
} catch (error) {
this.logger.warn('Failed to warm up key', { key, error });
}
}
this.logger.info('Memory cache warmup completed', {
requested: keys.length,
warmed
});
}
/**
* Sync memory cache with Redis for specific keys
*/
async syncCaches(keys: string[]): Promise<void> {
for (const key of keys) {
try {
const redisValue = await this.redisCache.get(key);
if (redisValue !== null) {
await this.memoryCache.set(key, redisValue);
} else {
await this.memoryCache.del(key);
}
} catch (error) {
this.logger.warn('Failed to sync key', { key, error });
}
}
}
/**
* Close connections for both caches
*/
async disconnect(): Promise<void> {
await this.redisCache.disconnect();
this.logger.info('Hybrid cache disconnected');
}
}
import { getLogger } from '@stock-bot/logger';
import { CacheProvider, CacheOptions, CacheStats } from '../types';
import { RedisCache } from './redis-cache';
import { MemoryCache } from './memory-cache';
/**
* Hybrid cache provider that uses memory as L1 cache and Redis as L2 cache
* Provides the best of both worlds: fast memory access and persistent Redis storage
*/
export class HybridCache implements CacheProvider {
private memoryCache: MemoryCache;
private redisCache: RedisCache;
private logger = getLogger('hybrid-cache');
private enableMetrics: boolean;
private startTime = Date.now();
private stats: CacheStats = {
hits: 0,
misses: 0,
errors: 0,
hitRate: 0,
total: 0,
uptime: 0
};
constructor(options: CacheOptions = {}) {
this.enableMetrics = options.enableMetrics ?? true;
// Create L1 (memory) cache with shorter TTL
this.memoryCache = new MemoryCache({
...options,
ttl: options.memoryTTL ?? 300, // 5 minutes for memory
maxMemoryItems: options.maxMemoryItems ?? 1000,
enableMetrics: false // We'll handle metrics at hybrid level
});
// Create L2 (Redis) cache with longer TTL
this.redisCache = new RedisCache({
...options,
enableMetrics: false // We'll handle metrics at hybrid level
});
this.logger.info('Hybrid cache initialized', {
memoryTTL: options.memoryTTL ?? 300,
redisTTL: options.ttl ?? 3600,
maxMemoryItems: options.maxMemoryItems ?? 1000
});
}
private updateStats(hit: boolean, error = false): void {
if (!this.enableMetrics) return;
if (error) {
this.stats.errors++;
} else if (hit) {
this.stats.hits++;
} else {
this.stats.misses++;
}
this.stats.total = this.stats.hits + this.stats.misses;
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
this.stats.uptime = Date.now() - this.startTime;
}
async get<T>(key: string): Promise<T | null> {
try {
// Try L1 cache first (memory)
const memoryValue = await this.memoryCache.get<T>(key);
if (memoryValue !== null) {
this.updateStats(true);
this.logger.debug('L1 cache hit', { key, hitRate: this.stats.hitRate });
return memoryValue;
}
// Try L2 cache (Redis)
const redisValue = await this.redisCache.get<T>(key);
if (redisValue !== null) {
// Populate L1 cache for next access
await this.memoryCache.set(key, redisValue);
this.updateStats(true);
this.logger.debug('L2 cache hit, populating L1', { key, hitRate: this.stats.hitRate });
return redisValue;
}
// Complete miss
this.updateStats(false);
this.logger.debug('Cache miss (both L1 and L2)', { key });
return null;
} catch (error) {
this.updateStats(false, true);
this.logger.error('Hybrid cache get error', {
key,
error: error instanceof Error ? error.message : String(error)
});
return null;
}
}
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
try {
// Set in both caches
const memoryPromise = this.memoryCache.set(key, value, Math.min(ttl ?? 300, 300));
const redisPromise = this.redisCache.set(key, value, ttl);
await Promise.allSettled([memoryPromise, redisPromise]);
this.logger.debug('Cache set (both L1 and L2)', { key, ttl });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Hybrid cache set error', {
key,
error: error instanceof Error ? error.message : String(error)
});
}
}
async del(key: string): Promise<void> {
try {
// Delete from both caches
const memoryPromise = this.memoryCache.del(key);
const redisPromise = this.redisCache.del(key);
await Promise.allSettled([memoryPromise, redisPromise]);
this.logger.debug('Cache delete (both L1 and L2)', { key });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Hybrid cache delete error', {
key,
error: error instanceof Error ? error.message : String(error)
});
}
}
async exists(key: string): Promise<boolean> {
try {
// Check memory first, then Redis
const memoryExists = await this.memoryCache.exists(key);
if (memoryExists) return true;
return await this.redisCache.exists(key);
} catch (error) {
this.updateStats(false, true);
this.logger.error('Hybrid cache exists error', {
key,
error: error instanceof Error ? error.message : String(error)
});
return false;
}
}
async clear(): Promise<void> {
try {
// Clear both caches
const memoryPromise = this.memoryCache.clear();
const redisPromise = this.redisCache.clear();
await Promise.allSettled([memoryPromise, redisPromise]);
this.logger.info('Cache cleared (both L1 and L2)');
} catch (error) {
this.updateStats(false, true);
this.logger.error('Hybrid cache clear error', {
error: error instanceof Error ? error.message : String(error)
});
}
}
async health(): Promise<boolean> {
try {
const memoryHealthy = await this.memoryCache.health();
const redisHealthy = await this.redisCache.health();
// Hybrid cache is healthy if at least one cache is working
const isHealthy = memoryHealthy || redisHealthy;
this.logger.debug('Hybrid cache health check', {
memory: memoryHealthy,
redis: redisHealthy,
overall: isHealthy
});
return isHealthy;
} catch (error) {
this.logger.error('Hybrid cache health check failed', error);
return false;
}
}
getStats(): CacheStats {
return {
...this.stats,
uptime: Date.now() - this.startTime
};
}
/**
* Get detailed stats for both cache layers
*/
getDetailedStats() {
return {
hybrid: this.getStats(),
memory: this.memoryCache.getStats(),
redis: this.redisCache.getStats()
};
}
/**
* Warm up the memory cache with frequently accessed keys from Redis
*/
async warmupMemoryCache(keys: string[]): Promise<void> {
this.logger.info('Starting memory cache warmup', { keyCount: keys.length });
let warmed = 0;
for (const key of keys) {
try {
const value = await this.redisCache.get(key);
if (value !== null) {
await this.memoryCache.set(key, value);
warmed++;
}
} catch (error) {
this.logger.warn('Failed to warm up key', { key, error });
}
}
this.logger.info('Memory cache warmup completed', {
requested: keys.length,
warmed
});
}
/**
* Sync memory cache with Redis for specific keys
*/
async syncCaches(keys: string[]): Promise<void> {
for (const key of keys) {
try {
const redisValue = await this.redisCache.get(key);
if (redisValue !== null) {
await this.memoryCache.set(key, redisValue);
} else {
await this.memoryCache.del(key);
}
} catch (error) {
this.logger.warn('Failed to sync key', { key, error });
}
}
}
/**
* Close connections for both caches
*/
async disconnect(): Promise<void> {
await this.redisCache.disconnect();
this.logger.info('Hybrid cache disconnected');
}
}

View file

@ -1,259 +1,259 @@
import { getLogger } from '@stock-bot/logger';
import { CacheProvider, CacheOptions, CacheStats } from '../types';
interface CacheEntry<T> {
value: T;
expiry: number;
accessed: number;
}
/**
* In-memory cache provider with LRU eviction and comprehensive metrics
*/
export class MemoryCache implements CacheProvider {
private store = new Map<string, CacheEntry<any>>();
private logger = getLogger('memory-cache');
private defaultTTL: number;
private keyPrefix: string;
private maxItems: number;
private enableMetrics: boolean;
private startTime = Date.now();
private stats: CacheStats = {
hits: 0,
misses: 0,
errors: 0,
hitRate: 0,
total: 0,
uptime: 0
};
constructor(options: CacheOptions = {}) {
this.defaultTTL = options.ttl ?? 3600; // 1 hour default
this.keyPrefix = options.keyPrefix ?? 'cache:';
this.maxItems = options.maxMemoryItems ?? 1000;
this.enableMetrics = options.enableMetrics ?? true;
this.logger.info('Memory cache initialized', {
maxItems: this.maxItems,
defaultTTL: this.defaultTTL,
enableMetrics: this.enableMetrics
});
// Cleanup expired entries every 5 minutes
setInterval(() => this.cleanup(), 5 * 60 * 1000);
}
private getKey(key: string): string {
return `${this.keyPrefix}${key}`;
}
private updateStats(hit: boolean, error = false): void {
if (!this.enableMetrics) return;
if (error) {
this.stats.errors++;
} else if (hit) {
this.stats.hits++;
} else {
this.stats.misses++;
}
this.stats.total = this.stats.hits + this.stats.misses;
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
this.stats.uptime = Date.now() - this.startTime;
}
private cleanup(): void {
const now = Date.now();
let cleaned = 0;
for (const [key, entry] of this.store.entries()) {
if (entry.expiry < now) {
this.store.delete(key);
cleaned++;
}
}
if (cleaned > 0) {
this.logger.debug('Cleaned expired entries', {
cleaned,
remaining: this.store.size
});
}
}
private evictLRU(): void {
if (this.store.size <= this.maxItems) return;
// Find least recently accessed item
let oldestKey = '';
let oldestAccess = Date.now();
for (const [key, entry] of this.store.entries()) {
if (entry.accessed < oldestAccess) {
oldestAccess = entry.accessed;
oldestKey = key;
}
}
if (oldestKey) {
this.store.delete(oldestKey);
this.logger.debug('Evicted LRU entry', { key: oldestKey });
}
}
async get<T>(key: string): Promise<T | null> {
try {
const fullKey = this.getKey(key);
const entry = this.store.get(fullKey);
if (!entry) {
this.updateStats(false);
this.logger.debug('Cache miss', { key });
return null;
}
const now = Date.now();
if (entry.expiry < now) {
this.store.delete(fullKey);
this.updateStats(false);
this.logger.debug('Cache miss (expired)', { key });
return null;
}
// Update access time for LRU
entry.accessed = now;
this.updateStats(true);
this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate });
return entry.value;
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache get error', {
key,
error: error instanceof Error ? error.message : String(error)
});
return null;
}
}
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
try {
const fullKey = this.getKey(key);
const now = Date.now();
const expiry = now + 1000 * (ttl ?? this.defaultTTL);
// Evict if necessary
this.evictLRU();
this.store.set(fullKey, {
value,
expiry,
accessed: now
});
this.logger.debug('Cache set', { key, ttl: ttl ?? this.defaultTTL });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache set error', {
key,
error: error instanceof Error ? error.message : String(error)
});
}
}
async del(key: string): Promise<void> {
try {
const fullKey = this.getKey(key);
const deleted = this.store.delete(fullKey);
this.logger.debug('Cache delete', { key, deleted });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache delete error', {
key,
error: error instanceof Error ? error.message : String(error)
});
}
}
async exists(key: string): Promise<boolean> {
try {
const fullKey = this.getKey(key);
const entry = this.store.get(fullKey);
if (!entry) return false;
// Check if expired
if (entry.expiry < Date.now()) {
this.store.delete(fullKey);
return false;
}
return true;
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache exists error', {
key,
error: error instanceof Error ? error.message : String(error)
});
return false;
}
}
async clear(): Promise<void> {
try {
const size = this.store.size;
this.store.clear();
this.logger.info('Cache cleared', { entriesDeleted: size });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache clear error', {
error: error instanceof Error ? error.message : String(error)
});
}
}
async health(): Promise<boolean> {
try {
// Simple health check - try to set and get a test value
await this.set('__health_check__', 'ok', 1);
const result = await this.get('__health_check__');
await this.del('__health_check__');
return result === 'ok';
} catch (error) {
this.logger.error('Memory cache health check failed', error);
return false;
}
}
getStats(): CacheStats {
return {
...this.stats,
uptime: Date.now() - this.startTime
};
}
/**
* Get additional memory cache specific stats
*/
getMemoryStats() {
return {
...this.getStats(),
entries: this.store.size,
maxItems: this.maxItems,
memoryUsage: this.estimateMemoryUsage()
};
}
private estimateMemoryUsage(): number {
// Rough estimation of memory usage in bytes
let bytes = 0;
for (const [key, entry] of this.store.entries()) {
bytes += key.length * 2; // UTF-16 characters
bytes += JSON.stringify(entry.value).length * 2;
bytes += 24; // Overhead for entry object
}
return bytes;
}
}
import { getLogger } from '@stock-bot/logger';
import { CacheProvider, CacheOptions, CacheStats } from '../types';
interface CacheEntry<T> {
value: T;
expiry: number;
accessed: number;
}
/**
* In-memory cache provider with LRU eviction and comprehensive metrics
*/
export class MemoryCache implements CacheProvider {
private store = new Map<string, CacheEntry<any>>();
private logger = getLogger('memory-cache');
private defaultTTL: number;
private keyPrefix: string;
private maxItems: number;
private enableMetrics: boolean;
private startTime = Date.now();
private stats: CacheStats = {
hits: 0,
misses: 0,
errors: 0,
hitRate: 0,
total: 0,
uptime: 0
};
constructor(options: CacheOptions = {}) {
this.defaultTTL = options.ttl ?? 3600; // 1 hour default
this.keyPrefix = options.keyPrefix ?? 'cache:';
this.maxItems = options.maxMemoryItems ?? 1000;
this.enableMetrics = options.enableMetrics ?? true;
this.logger.info('Memory cache initialized', {
maxItems: this.maxItems,
defaultTTL: this.defaultTTL,
enableMetrics: this.enableMetrics
});
// Cleanup expired entries every 5 minutes
setInterval(() => this.cleanup(), 5 * 60 * 1000);
}
private getKey(key: string): string {
return `${this.keyPrefix}${key}`;
}
private updateStats(hit: boolean, error = false): void {
if (!this.enableMetrics) return;
if (error) {
this.stats.errors++;
} else if (hit) {
this.stats.hits++;
} else {
this.stats.misses++;
}
this.stats.total = this.stats.hits + this.stats.misses;
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
this.stats.uptime = Date.now() - this.startTime;
}
private cleanup(): void {
const now = Date.now();
let cleaned = 0;
for (const [key, entry] of this.store.entries()) {
if (entry.expiry < now) {
this.store.delete(key);
cleaned++;
}
}
if (cleaned > 0) {
this.logger.debug('Cleaned expired entries', {
cleaned,
remaining: this.store.size
});
}
}
private evictLRU(): void {
if (this.store.size <= this.maxItems) return;
// Find least recently accessed item
let oldestKey = '';
let oldestAccess = Date.now();
for (const [key, entry] of this.store.entries()) {
if (entry.accessed < oldestAccess) {
oldestAccess = entry.accessed;
oldestKey = key;
}
}
if (oldestKey) {
this.store.delete(oldestKey);
this.logger.debug('Evicted LRU entry', { key: oldestKey });
}
}
async get<T>(key: string): Promise<T | null> {
try {
const fullKey = this.getKey(key);
const entry = this.store.get(fullKey);
if (!entry) {
this.updateStats(false);
this.logger.debug('Cache miss', { key });
return null;
}
const now = Date.now();
if (entry.expiry < now) {
this.store.delete(fullKey);
this.updateStats(false);
this.logger.debug('Cache miss (expired)', { key });
return null;
}
// Update access time for LRU
entry.accessed = now;
this.updateStats(true);
this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate });
return entry.value;
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache get error', {
key,
error: error instanceof Error ? error.message : String(error)
});
return null;
}
}
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
try {
const fullKey = this.getKey(key);
const now = Date.now();
const expiry = now + 1000 * (ttl ?? this.defaultTTL);
// Evict if necessary
this.evictLRU();
this.store.set(fullKey, {
value,
expiry,
accessed: now
});
this.logger.debug('Cache set', { key, ttl: ttl ?? this.defaultTTL });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache set error', {
key,
error: error instanceof Error ? error.message : String(error)
});
}
}
async del(key: string): Promise<void> {
try {
const fullKey = this.getKey(key);
const deleted = this.store.delete(fullKey);
this.logger.debug('Cache delete', { key, deleted });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache delete error', {
key,
error: error instanceof Error ? error.message : String(error)
});
}
}
async exists(key: string): Promise<boolean> {
try {
const fullKey = this.getKey(key);
const entry = this.store.get(fullKey);
if (!entry) return false;
// Check if expired
if (entry.expiry < Date.now()) {
this.store.delete(fullKey);
return false;
}
return true;
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache exists error', {
key,
error: error instanceof Error ? error.message : String(error)
});
return false;
}
}
async clear(): Promise<void> {
try {
const size = this.store.size;
this.store.clear();
this.logger.info('Cache cleared', { entriesDeleted: size });
} catch (error) {
this.updateStats(false, true);
this.logger.error('Cache clear error', {
error: error instanceof Error ? error.message : String(error)
});
}
}
async health(): Promise<boolean> {
try {
// Simple health check - try to set and get a test value
await this.set('__health_check__', 'ok', 1);
const result = await this.get('__health_check__');
await this.del('__health_check__');
return result === 'ok';
} catch (error) {
this.logger.error('Memory cache health check failed', error);
return false;
}
}
getStats(): CacheStats {
return {
...this.stats,
uptime: Date.now() - this.startTime
};
}
/**
* Get additional memory cache specific stats
*/
getMemoryStats() {
return {
...this.getStats(),
entries: this.store.size,
maxItems: this.maxItems,
memoryUsage: this.estimateMemoryUsage()
};
}
private estimateMemoryUsage(): number {
// Rough estimation of memory usage in bytes
let bytes = 0;
for (const [key, entry] of this.store.entries()) {
bytes += key.length * 2; // UTF-16 characters
bytes += JSON.stringify(entry.value).length * 2;
bytes += 24; // Overhead for entry object
}
return bytes;
}
}

View file

@ -1,263 +1,263 @@
import Redis from 'ioredis';
import { getLogger } from '@stock-bot/logger';
import { dragonflyConfig } from '@stock-bot/config';
import { CacheProvider, CacheOptions, CacheStats } from '../types';
/**
* Redis-based cache provider with comprehensive error handling and metrics
*/
export class RedisCache implements CacheProvider {
private redis: Redis;
private logger = getLogger('redis-cache');
private defaultTTL: number;
private keyPrefix: string;
private enableMetrics: boolean;
private isConnected = false;
private startTime = Date.now();
private stats: CacheStats = {
hits: 0,
misses: 0,
errors: 0,
hitRate: 0,
total: 0,
uptime: 0
};
constructor(options: CacheOptions = {}) {
this.defaultTTL = options.ttl ?? 3600; // 1 hour default
this.keyPrefix = options.keyPrefix ?? 'cache:';
this.enableMetrics = options.enableMetrics ?? true;
const redisConfig = {
host: dragonflyConfig.DRAGONFLY_HOST,
port: dragonflyConfig.DRAGONFLY_PORT,
password: dragonflyConfig.DRAGONFLY_PASSWORD || undefined,
username: dragonflyConfig.DRAGONFLY_USERNAME || undefined,
db: dragonflyConfig.DRAGONFLY_DATABASE,
maxRetriesPerRequest: dragonflyConfig.DRAGONFLY_MAX_RETRIES,
retryDelayOnFailover: dragonflyConfig.DRAGONFLY_RETRY_DELAY,
connectTimeout: dragonflyConfig.DRAGONFLY_CONNECT_TIMEOUT,
commandTimeout: dragonflyConfig.DRAGONFLY_COMMAND_TIMEOUT,
keepAlive: dragonflyConfig.DRAGONFLY_ENABLE_KEEPALIVE ? dragonflyConfig.DRAGONFLY_KEEPALIVE_INTERVAL * 1000 : 0,
...(dragonflyConfig.DRAGONFLY_TLS && {
tls: {
cert: dragonflyConfig.DRAGONFLY_TLS_CERT_FILE || undefined,
key: dragonflyConfig.DRAGONFLY_TLS_KEY_FILE || undefined,
ca: dragonflyConfig.DRAGONFLY_TLS_CA_FILE || undefined,
rejectUnauthorized: !dragonflyConfig.DRAGONFLY_TLS_SKIP_VERIFY,
}
})
};
this.redis = new Redis(redisConfig);
this.setupEventHandlers();
}
private setupEventHandlers(): void {
this.redis.on('connect', () => {
this.isConnected = true;
this.logger.info('Redis cache connected', {
host: dragonflyConfig.DRAGONFLY_HOST,
port: dragonflyConfig.DRAGONFLY_PORT,
db: dragonflyConfig.DRAGONFLY_DATABASE
});
});
this.redis.on('ready', () => {
this.logger.info('Redis cache ready for commands');
});
this.redis.on('error', (error) => {
this.isConnected = false;
this.stats.errors++;
this.logger.error('Redis cache connection error', { error: error.message });
});
this.redis.on('close', () => {
this.isConnected = false;
this.logger.warn('Redis cache connection closed');
});
this.redis.on('reconnecting', () => {
this.logger.info('Redis cache reconnecting...');
});
}
private getKey(key: string): string {
return `${this.keyPrefix}${key}`;
}
private updateStats(hit: boolean, error = false): void {
if (!this.enableMetrics) return;
if (error) {
this.stats.errors++;
} else if (hit) {
this.stats.hits++;
} else {
this.stats.misses++;
}
this.stats.total = this.stats.hits + this.stats.misses;
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
this.stats.uptime = Date.now() - this.startTime;
}
private async safeExecute<T>(
operation: () => Promise<T>,
fallback: T,
operationName: string
): Promise<T> {
if (!this.isConnected) {
this.logger.warn(`Redis not connected for ${operationName}, using fallback`);
this.updateStats(false, true);
return fallback;
}
try {
return await operation();
} catch (error) {
this.logger.error(`Redis ${operationName} failed`, {
error: error instanceof Error ? error.message : String(error)
});
this.updateStats(false, true);
return fallback;
}
}
async get<T>(key: string): Promise<T | null> {
return this.safeExecute(
async () => {
const fullKey = this.getKey(key);
const value = await this.redis.get(fullKey);
if (value === null) {
this.updateStats(false);
this.logger.debug('Cache miss', { key });
return null;
}
this.updateStats(true);
this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate });
try {
return JSON.parse(value) as T;
} catch {
// Return as-is if not valid JSON
return value as unknown as T;
}
},
null,
'get'
);
}
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
await this.safeExecute(
async () => {
const fullKey = this.getKey(key);
const serialized = typeof value === 'string' ? value : JSON.stringify(value);
const expiry = ttl ?? this.defaultTTL;
await this.redis.setex(fullKey, expiry, serialized);
this.logger.debug('Cache set', { key, ttl: expiry });
},
undefined,
'set'
);
}
async del(key: string): Promise<void> {
await this.safeExecute(
async () => {
const fullKey = this.getKey(key);
await this.redis.del(fullKey);
this.logger.debug('Cache delete', { key });
},
undefined,
'del'
);
}
async exists(key: string): Promise<boolean> {
return this.safeExecute(
async () => {
const fullKey = this.getKey(key);
const result = await this.redis.exists(fullKey);
return result === 1;
},
false,
'exists'
);
}
async clear(): Promise<void> {
await this.safeExecute(
async () => {
const pattern = `${this.keyPrefix}*`;
const keys = await this.redis.keys(pattern);
if (keys.length > 0) {
await this.redis.del(...keys);
this.logger.info('Cache cleared', { keysDeleted: keys.length });
}
},
undefined,
'clear'
);
}
async health(): Promise<boolean> {
try {
const pong = await this.redis.ping();
return pong === 'PONG' && this.isConnected;
} catch (error) {
this.logger.error('Redis health check failed', error);
return false;
}
}
getStats(): CacheStats {
return {
...this.stats,
uptime: Date.now() - this.startTime
};
}
/**
* Trading-specific convenience methods
*/
async cacheMarketData(symbol: string, timeframe: string, data: any[], ttl = 300): Promise<void> {
const key = `market:${symbol}:${timeframe}`;
await this.set(key, data, ttl);
}
async getMarketData<T>(symbol: string, timeframe: string): Promise<T | null> {
const key = `market:${symbol}:${timeframe}`;
return this.get<T>(key);
}
async cacheIndicator(
symbol: string,
indicator: string,
period: number,
data: number[],
ttl = 600
): Promise<void> {
const key = `indicator:${symbol}:${indicator}:${period}`;
await this.set(key, data, ttl);
}
async getIndicator(symbol: string, indicator: string, period: number): Promise<number[] | null> {
const key = `indicator:${symbol}:${indicator}:${period}`;
return this.get<number[]>(key);
}
/**
* Close the Redis connection
*/
async disconnect(): Promise<void> {
await this.redis.quit();
this.logger.info('Redis cache disconnected');
}
}
import Redis from 'ioredis';
import { getLogger } from '@stock-bot/logger';
import { dragonflyConfig } from '@stock-bot/config';
import { CacheProvider, CacheOptions, CacheStats } from '../types';
/**
* Redis-based cache provider with comprehensive error handling and metrics
*/
export class RedisCache implements CacheProvider {
private redis: Redis;
private logger = getLogger('redis-cache');
private defaultTTL: number;
private keyPrefix: string;
private enableMetrics: boolean;
private isConnected = false;
private startTime = Date.now();
private stats: CacheStats = {
hits: 0,
misses: 0,
errors: 0,
hitRate: 0,
total: 0,
uptime: 0
};
constructor(options: CacheOptions = {}) {
this.defaultTTL = options.ttl ?? 3600; // 1 hour default
this.keyPrefix = options.keyPrefix ?? 'cache:';
this.enableMetrics = options.enableMetrics ?? true;
const redisConfig = {
host: dragonflyConfig.DRAGONFLY_HOST,
port: dragonflyConfig.DRAGONFLY_PORT,
password: dragonflyConfig.DRAGONFLY_PASSWORD || undefined,
username: dragonflyConfig.DRAGONFLY_USERNAME || undefined,
db: dragonflyConfig.DRAGONFLY_DATABASE,
maxRetriesPerRequest: dragonflyConfig.DRAGONFLY_MAX_RETRIES,
retryDelayOnFailover: dragonflyConfig.DRAGONFLY_RETRY_DELAY,
connectTimeout: dragonflyConfig.DRAGONFLY_CONNECT_TIMEOUT,
commandTimeout: dragonflyConfig.DRAGONFLY_COMMAND_TIMEOUT,
keepAlive: dragonflyConfig.DRAGONFLY_ENABLE_KEEPALIVE ? dragonflyConfig.DRAGONFLY_KEEPALIVE_INTERVAL * 1000 : 0,
...(dragonflyConfig.DRAGONFLY_TLS && {
tls: {
cert: dragonflyConfig.DRAGONFLY_TLS_CERT_FILE || undefined,
key: dragonflyConfig.DRAGONFLY_TLS_KEY_FILE || undefined,
ca: dragonflyConfig.DRAGONFLY_TLS_CA_FILE || undefined,
rejectUnauthorized: !dragonflyConfig.DRAGONFLY_TLS_SKIP_VERIFY,
}
})
};
this.redis = new Redis(redisConfig);
this.setupEventHandlers();
}
private setupEventHandlers(): void {
this.redis.on('connect', () => {
this.isConnected = true;
this.logger.info('Redis cache connected', {
host: dragonflyConfig.DRAGONFLY_HOST,
port: dragonflyConfig.DRAGONFLY_PORT,
db: dragonflyConfig.DRAGONFLY_DATABASE
});
});
this.redis.on('ready', () => {
this.logger.info('Redis cache ready for commands');
});
this.redis.on('error', (error) => {
this.isConnected = false;
this.stats.errors++;
this.logger.error('Redis cache connection error', { error: error.message });
});
this.redis.on('close', () => {
this.isConnected = false;
this.logger.warn('Redis cache connection closed');
});
this.redis.on('reconnecting', () => {
this.logger.info('Redis cache reconnecting...');
});
}
private getKey(key: string): string {
return `${this.keyPrefix}${key}`;
}
private updateStats(hit: boolean, error = false): void {
if (!this.enableMetrics) return;
if (error) {
this.stats.errors++;
} else if (hit) {
this.stats.hits++;
} else {
this.stats.misses++;
}
this.stats.total = this.stats.hits + this.stats.misses;
this.stats.hitRate = this.stats.total > 0 ? this.stats.hits / this.stats.total : 0;
this.stats.uptime = Date.now() - this.startTime;
}
private async safeExecute<T>(
operation: () => Promise<T>,
fallback: T,
operationName: string
): Promise<T> {
if (!this.isConnected) {
this.logger.warn(`Redis not connected for ${operationName}, using fallback`);
this.updateStats(false, true);
return fallback;
}
try {
return await operation();
} catch (error) {
this.logger.error(`Redis ${operationName} failed`, {
error: error instanceof Error ? error.message : String(error)
});
this.updateStats(false, true);
return fallback;
}
}
async get<T>(key: string): Promise<T | null> {
return this.safeExecute(
async () => {
const fullKey = this.getKey(key);
const value = await this.redis.get(fullKey);
if (value === null) {
this.updateStats(false);
this.logger.debug('Cache miss', { key });
return null;
}
this.updateStats(true);
this.logger.debug('Cache hit', { key, hitRate: this.stats.hitRate });
try {
return JSON.parse(value) as T;
} catch {
// Return as-is if not valid JSON
return value as unknown as T;
}
},
null,
'get'
);
}
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
await this.safeExecute(
async () => {
const fullKey = this.getKey(key);
const serialized = typeof value === 'string' ? value : JSON.stringify(value);
const expiry = ttl ?? this.defaultTTL;
await this.redis.setex(fullKey, expiry, serialized);
this.logger.debug('Cache set', { key, ttl: expiry });
},
undefined,
'set'
);
}
async del(key: string): Promise<void> {
await this.safeExecute(
async () => {
const fullKey = this.getKey(key);
await this.redis.del(fullKey);
this.logger.debug('Cache delete', { key });
},
undefined,
'del'
);
}
async exists(key: string): Promise<boolean> {
return this.safeExecute(
async () => {
const fullKey = this.getKey(key);
const result = await this.redis.exists(fullKey);
return result === 1;
},
false,
'exists'
);
}
async clear(): Promise<void> {
await this.safeExecute(
async () => {
const pattern = `${this.keyPrefix}*`;
const keys = await this.redis.keys(pattern);
if (keys.length > 0) {
await this.redis.del(...keys);
this.logger.info('Cache cleared', { keysDeleted: keys.length });
}
},
undefined,
'clear'
);
}
async health(): Promise<boolean> {
try {
const pong = await this.redis.ping();
return pong === 'PONG' && this.isConnected;
} catch (error) {
this.logger.error('Redis health check failed', error);
return false;
}
}
getStats(): CacheStats {
return {
...this.stats,
uptime: Date.now() - this.startTime
};
}
/**
* Trading-specific convenience methods
*/
async cacheMarketData(symbol: string, timeframe: string, data: any[], ttl = 300): Promise<void> {
const key = `market:${symbol}:${timeframe}`;
await this.set(key, data, ttl);
}
async getMarketData<T>(symbol: string, timeframe: string): Promise<T | null> {
const key = `market:${symbol}:${timeframe}`;
return this.get<T>(key);
}
async cacheIndicator(
symbol: string,
indicator: string,
period: number,
data: number[],
ttl = 600
): Promise<void> {
const key = `indicator:${symbol}:${indicator}:${period}`;
await this.set(key, data, ttl);
}
async getIndicator(symbol: string, indicator: string, period: number): Promise<number[] | null> {
const key = `indicator:${symbol}:${indicator}:${period}`;
return this.get<number[]>(key);
}
/**
* Close the Redis connection
*/
async disconnect(): Promise<void> {
await this.redis.quit();
this.logger.info('Redis cache disconnected');
}
}

View file

@ -1,42 +1,42 @@
export interface CacheProvider {
get<T>(key: string): Promise<T | null>;
set<T>(key: string, value: T, ttl?: number): Promise<void>;
del(key: string): Promise<void>;
exists(key: string): Promise<boolean>;
clear(): Promise<void>;
getStats(): CacheStats;
health(): Promise<boolean>;
}
export interface CacheOptions {
ttl?: number;
keyPrefix?: string;
enableMetrics?: boolean;
maxMemoryItems?: number;
memoryTTL?: number;
}
export interface CacheStats {
hits: number;
misses: number;
errors: number;
hitRate: number;
total: number;
uptime: number;
}
export interface CacheConfig {
type: 'redis' | 'memory' | 'hybrid';
keyPrefix?: string;
defaultTTL?: number;
maxMemoryItems?: number;
enableMetrics?: boolean;
compression?: boolean;
}
export type CacheKey = string | (() => string);
export interface SerializationOptions {
compress?: boolean;
binary?: boolean;
}
export interface CacheProvider {
get<T>(key: string): Promise<T | null>;
set<T>(key: string, value: T, ttl?: number): Promise<void>;
del(key: string): Promise<void>;
exists(key: string): Promise<boolean>;
clear(): Promise<void>;
getStats(): CacheStats;
health(): Promise<boolean>;
}
export interface CacheOptions {
ttl?: number;
keyPrefix?: string;
enableMetrics?: boolean;
maxMemoryItems?: number;
memoryTTL?: number;
}
export interface CacheStats {
hits: number;
misses: number;
errors: number;
hitRate: number;
total: number;
uptime: number;
}
export interface CacheConfig {
type: 'redis' | 'memory' | 'hybrid';
keyPrefix?: string;
defaultTTL?: number;
maxMemoryItems?: number;
enableMetrics?: boolean;
compression?: boolean;
}
export type CacheKey = string | (() => string);
export interface SerializationOptions {
compress?: boolean;
binary?: boolean;
}

View file

@ -1,13 +1,13 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../config" },
{ "path": "../logger" }
]
}
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../config" },
{ "path": "../logger" }
]
}

20
libs/cache/turbo.json vendored
View file

@ -1,10 +1,10 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}

View file

@ -1,103 +1,103 @@
# @stock-bot/config
A configuration management library for the Stock Bot trading platform.
## Overview
This library provides a centralized way to manage configurations across all Stock Bot microservices and components. It includes:
- Environment-based configuration loading
- Strong TypeScript typing and validation using Zod
- Default configurations for services
- Environment variable parsing helpers
- Service-specific configuration modules
## Usage
### Basic Usage
```typescript
import { databaseConfig, dataProviderConfigs, riskConfig } from '@stock-bot/config';
// Access database configuration
const dragonflyHost = databaseConfig.dragonfly.host;
// Access data provider configuration
const alpacaApiKey = dataProviderConfigs.providers.find(p => p.name === 'alpaca')?.apiKey;
// Access risk configuration
const maxPositionSize = riskConfig.maxPositionSize;
```
### Service-Specific Configuration
```typescript
import { marketDataGatewayConfig, riskGuardianConfig } from '@stock-bot/config';
// Access Market Data Gateway configuration
const websocketPath = marketDataGatewayConfig.websocket.path;
// Access Risk Guardian configuration
const preTradeValidation = riskGuardianConfig.riskChecks.preTradeValidation;
```
### Environment Variables
The library automatically loads environment variables from `.env` files. You can create environment-specific files:
- `.env` - Base environment variables
- `.env.development` - Development-specific variables
- `.env.production` - Production-specific variables
- `.env.local` - Local overrides (not to be committed to git)
## Configuration Modules
### Core Configuration
- `Environment` - Enum for different environments
- `loadEnvVariables()` - Load environment variables from .env files
- `getEnvironment()` - Get the current environment
- `validateConfig()` - Validate configuration with Zod schema
### Database Configuration
- `databaseConfig` - Database connection settings (Dragonfly, QuestDB, MongoDB, PostgreSQL)
### Data Provider Configuration
- `dataProviderConfigs` - Settings for market data providers
### Risk Configuration
- `riskConfig` - Risk management parameters (max drawdown, position size, etc.)
### Service-Specific Configuration
- `marketDataGatewayConfig` - Configs for the Market Data Gateway service
- `riskGuardianConfig` - Configs for the Risk Guardian service
## Extending
To add a new service configuration:
1. Create a new file in `src/services/`
2. Define a Zod schema for validation
3. Create loading and default configuration functions
4. Export from `src/services/index.ts`
5. The new configuration will be automatically available from the main package
## Development
```bash
# Install dependencies
bun install
# Run tests
bun test
# Type check
bun run type-check
# Lint
bun run lint
```
# @stock-bot/config
A configuration management library for the Stock Bot trading platform.
## Overview
This library provides a centralized way to manage configurations across all Stock Bot microservices and components. It includes:
- Environment-based configuration loading
- Strong TypeScript typing and validation using Zod
- Default configurations for services
- Environment variable parsing helpers
- Service-specific configuration modules
## Usage
### Basic Usage
```typescript
import { databaseConfig, dataProviderConfigs, riskConfig } from '@stock-bot/config';
// Access database configuration
const dragonflyHost = databaseConfig.dragonfly.host;
// Access data provider configuration
const alpacaApiKey = dataProviderConfigs.providers.find(p => p.name === 'alpaca')?.apiKey;
// Access risk configuration
const maxPositionSize = riskConfig.maxPositionSize;
```
### Service-Specific Configuration
```typescript
import { marketDataGatewayConfig, riskGuardianConfig } from '@stock-bot/config';
// Access Market Data Gateway configuration
const websocketPath = marketDataGatewayConfig.websocket.path;
// Access Risk Guardian configuration
const preTradeValidation = riskGuardianConfig.riskChecks.preTradeValidation;
```
### Environment Variables
The library automatically loads environment variables from `.env` files. You can create environment-specific files:
- `.env` - Base environment variables
- `.env.development` - Development-specific variables
- `.env.production` - Production-specific variables
- `.env.local` - Local overrides (not to be committed to git)
## Configuration Modules
### Core Configuration
- `Environment` - Enum for different environments
- `loadEnvVariables()` - Load environment variables from .env files
- `getEnvironment()` - Get the current environment
- `validateConfig()` - Validate configuration with Zod schema
### Database Configuration
- `databaseConfig` - Database connection settings (Dragonfly, QuestDB, MongoDB, PostgreSQL)
### Data Provider Configuration
- `dataProviderConfigs` - Settings for market data providers
### Risk Configuration
- `riskConfig` - Risk management parameters (max drawdown, position size, etc.)
### Service-Specific Configuration
- `marketDataGatewayConfig` - Configs for the Market Data Gateway service
- `riskGuardianConfig` - Configs for the Risk Guardian service
## Extending
To add a new service configuration:
1. Create a new file in `src/services/`
2. Define a Zod schema for validation
3. Create loading and default configuration functions
4. Export from `src/services/index.ts`
5. The new configuration will be automatically available from the main package
## Development
```bash
# Install dependencies
bun install
# Run tests
bun test
# Type check
bun run type-check
# Lint
bun run lint
```

View file

@ -1,131 +1,131 @@
# Stock Bot Configuration Library Usage Guide
This guide shows how to use the Zod-based configuration system in the Stock Bot platform.
## Quick Start
```typescript
import { databaseConfig, loggingConfig, riskConfig, dataProvidersConfig } from '@stock-bot/config';
// Access individual values
console.log(`Database: ${databaseConfig.POSTGRES_HOST}:${databaseConfig.POSTGRES_PORT}`);
console.log(`Log level: ${loggingConfig.LOG_LEVEL}`);
console.log(`Max position size: ${riskConfig.RISK_MAX_POSITION_SIZE}`);
```
## Environment Variables
All configuration is driven by environment variables. You can set them in:
- `.env` files
- System environment variables
- Docker environment variables
### Database Configuration
```bash
DB_HOST=localhost
DB_PORT=5432
DB_NAME=stockbot
DB_USER=stockbot
DB_PASSWORD=your_password
DB_SSL=false
DB_POOL_MAX=10
```
### Logging Configuration
```bash
LOG_LEVEL=info
LOG_CONSOLE=true
LOKI_HOST=localhost
LOKI_PORT=3100
LOKI_LABELS=service=market-data-gateway,version=1.0.0
```
### Risk Management Configuration
```bash
RISK_MAX_POSITION_SIZE=0.1
RISK_DEFAULT_STOP_LOSS=0.05
RISK_DEFAULT_TAKE_PROFIT=0.15
RISK_CIRCUIT_BREAKER_ENABLED=true
```
### Data Provider Configuration
```bash
DEFAULT_DATA_PROVIDER=alpaca
ALPACA_API_KEY=your_api_key
ALPACA_API_SECRET=your_api_secret
ALPACA_ENABLED=true
POLYGON_ENABLED=false
```
## Advanced Usage
### Type Safety
All configurations are fully typed:
```typescript
import type { DatabaseConfig, LoggingConfig, RiskConfig } from '@stock-bot/config';
function setupDatabase(config: DatabaseConfig) {
// TypeScript knows all the available properties
return {
host: config.POSTGRES_HOST,
port: config.POSTGRES_PORT, // number
ssl: config.POSTGRES_SSL, // boolean
};
}
```
### Environment Detection
```typescript
import { getEnvironment, Environment } from '@stock-bot/config';
const env = getEnvironment();
if (env === Environment.Production) {
// Production-specific logic
}
```
### Data Provider Helpers
```typescript
import { getProviderConfig, getEnabledProviders, getDefaultProvider } from '@stock-bot/config';
// Get specific provider
const alpaca = getProviderConfig('alpaca');
// Get all enabled providers
const providers = getEnabledProviders();
// Get default provider
const defaultProvider = getDefaultProvider();
```
## Configuration Files
The library consists of these modules:
- **core.ts** - Core utilities and environment detection
- **database.ts** - Database connection settings
- **logging.ts** - Logging and Loki configuration
- **risk.ts** - Risk management parameters
- **data-providers.ts** - Data provider settings
## Benefits of This Approach
1. **Zero Configuration Schema** - No complex schema definitions needed
2. **Automatic Type Inference** - TypeScript types are generated automatically
3. **Environment Variable Validation** - Invalid values are caught at startup
4. **Great Developer Experience** - IntelliSense works perfectly
5. **Production Ready** - Used by many large-scale applications
## Migration from Previous System
If you're migrating from the old Valibot-based system:
```typescript
// Old way
const config = createConfigLoader('database', databaseSchema, defaultConfig)();
// New way
import { databaseConfig } from '@stock-bot/config';
// That's it! No schema needed, no validation needed, no complex setup.
```
# Stock Bot Configuration Library Usage Guide
This guide shows how to use the Zod-based configuration system in the Stock Bot platform.
## Quick Start
```typescript
import { databaseConfig, loggingConfig, riskConfig, dataProvidersConfig } from '@stock-bot/config';
// Access individual values
console.log(`Database: ${databaseConfig.POSTGRES_HOST}:${databaseConfig.POSTGRES_PORT}`);
console.log(`Log level: ${loggingConfig.LOG_LEVEL}`);
console.log(`Max position size: ${riskConfig.RISK_MAX_POSITION_SIZE}`);
```
## Environment Variables
All configuration is driven by environment variables. You can set them in:
- `.env` files
- System environment variables
- Docker environment variables
### Database Configuration
```bash
DB_HOST=localhost
DB_PORT=5432
DB_NAME=stockbot
DB_USER=stockbot
DB_PASSWORD=your_password
DB_SSL=false
DB_POOL_MAX=10
```
### Logging Configuration
```bash
LOG_LEVEL=info
LOG_CONSOLE=true
LOKI_HOST=localhost
LOKI_PORT=3100
LOKI_LABELS=service=market-data-gateway,version=1.0.0
```
### Risk Management Configuration
```bash
RISK_MAX_POSITION_SIZE=0.1
RISK_DEFAULT_STOP_LOSS=0.05
RISK_DEFAULT_TAKE_PROFIT=0.15
RISK_CIRCUIT_BREAKER_ENABLED=true
```
### Data Provider Configuration
```bash
DEFAULT_DATA_PROVIDER=alpaca
ALPACA_API_KEY=your_api_key
ALPACA_API_SECRET=your_api_secret
ALPACA_ENABLED=true
POLYGON_ENABLED=false
```
## Advanced Usage
### Type Safety
All configurations are fully typed:
```typescript
import type { DatabaseConfig, LoggingConfig, RiskConfig } from '@stock-bot/config';
function setupDatabase(config: DatabaseConfig) {
// TypeScript knows all the available properties
return {
host: config.POSTGRES_HOST,
port: config.POSTGRES_PORT, // number
ssl: config.POSTGRES_SSL, // boolean
};
}
```
### Environment Detection
```typescript
import { getEnvironment, Environment } from '@stock-bot/config';
const env = getEnvironment();
if (env === Environment.Production) {
// Production-specific logic
}
```
### Data Provider Helpers
```typescript
import { getProviderConfig, getEnabledProviders, getDefaultProvider } from '@stock-bot/config';
// Get specific provider
const alpaca = getProviderConfig('alpaca');
// Get all enabled providers
const providers = getEnabledProviders();
// Get default provider
const defaultProvider = getDefaultProvider();
```
## Configuration Files
The library consists of these modules:
- **core.ts** - Core utilities and environment detection
- **database.ts** - Database connection settings
- **logging.ts** - Logging and Loki configuration
- **risk.ts** - Risk management parameters
- **data-providers.ts** - Data provider settings
## Benefits of This Approach
1. **Zero Configuration Schema** - No complex schema definitions needed
2. **Automatic Type Inference** - TypeScript types are generated automatically
3. **Environment Variable Validation** - Invalid values are caught at startup
4. **Great Developer Experience** - IntelliSense works perfectly
5. **Production Ready** - Used by many large-scale applications
## Migration from Previous System
If you're migrating from the old Valibot-based system:
```typescript
// Old way
const config = createConfigLoader('database', databaseSchema, defaultConfig)();
// New way
import { databaseConfig } from '@stock-bot/config';
// That's it! No schema needed, no validation needed, no complex setup.
```

View file

@ -1,15 +1,15 @@
[test]
# Configure path mapping for tests
preload = ["./test/setup.ts"]
# Test configuration
timeout = 5000
# Set test environment
env = { NODE_ENV = "test" }
[bun]
# Enable TypeScript paths resolution
paths = {
"@/*" = ["./src/*"]
}
[test]
# Configure path mapping for tests
preload = ["./test/setup.ts"]
# Test configuration
timeout = 5000
# Set test environment
env = { NODE_ENV = "test" }
[bun]
# Enable TypeScript paths resolution
paths = {
"@/*" = ["./src/*"]
}

View file

@ -1,44 +1,44 @@
{
"name": "@stock-bot/config",
"version": "1.0.0",
"description": "Configuration management library for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"clean": "rimraf dist"
},
"dependencies": {
"dotenv": "^16.5.0",
"yup": "^1.6.1"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"keywords": [
"configuration",
"settings",
"env",
"stock-bot"
],
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}
{
"name": "@stock-bot/config",
"version": "1.0.0",
"description": "Configuration management library for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"clean": "rimraf dist"
},
"dependencies": {
"dotenv": "^16.5.0",
"yup": "^1.6.1"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"keywords": [
"configuration",
"settings",
"env",
"stock-bot"
],
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}

View file

@ -1,24 +1,24 @@
@echo off
echo Building @stock-bot/config library...
cd /d g:\repos\stock-bot
echo Installing dependencies...
bun install
echo Running type check...
cd /d g:\repos\stock-bot\libs\config
bun run type-check
echo Running tests...
bun test
echo Setting up example configuration...
copy .env.example .env
echo Running example to display configuration...
bun run src/example.ts
echo.
echo Configuration library setup complete!
echo.
echo You can now import @stock-bot/config in your services.
@echo off
echo Building @stock-bot/config library...
cd /d g:\repos\stock-bot
echo Installing dependencies...
bun install
echo Running type check...
cd /d g:\repos\stock-bot\libs\config
bun run type-check
echo Running tests...
bun test
echo Setting up example configuration...
copy .env.example .env
echo Running example to display configuration...
bun run src/example.ts
echo.
echo Configuration library setup complete!
echo.
echo You can now import @stock-bot/config in your services.

View file

@ -1,111 +1,111 @@
/**
* Admin interfaces configuration using Yup
* PgAdmin, Mongo Express, Redis Insight for database management
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, strWithChoices } = envValidators;
/**
* PgAdmin configuration with validation and defaults
*/
export const pgAdminConfig = cleanEnv(process.env, {
// PgAdmin Server
PGADMIN_HOST: str('localhost', 'PgAdmin host'),
PGADMIN_PORT: port(8080, 'PgAdmin port'),
// Authentication
PGADMIN_DEFAULT_EMAIL: str('admin@tradingbot.local', 'PgAdmin default admin email'),
PGADMIN_DEFAULT_PASSWORD: str('admin123', 'PgAdmin default admin password'),
// Configuration
PGADMIN_SERVER_MODE: bool(false, 'Enable server mode (multi-user)'),
PGADMIN_DISABLE_POSTFIX: bool(true, 'Disable postfix for email'),
PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION: bool(true, 'Enhanced cookie protection'),
// Security
PGADMIN_MASTER_PASSWORD_REQUIRED: bool(false, 'Require master password'),
PGADMIN_SESSION_TIMEOUT: str('60', 'Session timeout in minutes'),
});
/**
* Mongo Express configuration with validation and defaults
*/
export const mongoExpressConfig = cleanEnv(process.env, {
// Mongo Express Server
MONGO_EXPRESS_HOST: str('localhost', 'Mongo Express host'),
MONGO_EXPRESS_PORT: port(8081, 'Mongo Express port'),
// MongoDB Connection
MONGO_EXPRESS_MONGODB_SERVER: str('mongodb', 'MongoDB server name/host'),
MONGO_EXPRESS_MONGODB_PORT: port(27017, 'MongoDB port'),
MONGO_EXPRESS_MONGODB_ADMINUSERNAME: str('trading_admin', 'MongoDB admin username'),
MONGO_EXPRESS_MONGODB_ADMINPASSWORD: str('', 'MongoDB admin password'),
// Basic Authentication for Mongo Express
MONGO_EXPRESS_BASICAUTH_USERNAME: str('admin', 'Basic auth username for Mongo Express'),
MONGO_EXPRESS_BASICAUTH_PASSWORD: str('admin123', 'Basic auth password for Mongo Express'),
// Configuration
MONGO_EXPRESS_ENABLE_ADMIN: bool(true, 'Enable admin features'),
MONGO_EXPRESS_OPTIONS_EDITOR_THEME: str('rubyblue', 'Editor theme (rubyblue, 3024-night, etc.)'),
MONGO_EXPRESS_REQUEST_SIZE: str('100kb', 'Maximum request size'),
});
/**
* Redis Insight configuration with validation and defaults
*/
export const redisInsightConfig = cleanEnv(process.env, {
// Redis Insight Server
REDIS_INSIGHT_HOST: str('localhost', 'Redis Insight host'),
REDIS_INSIGHT_PORT: port(8001, 'Redis Insight port'),
// Redis Connection Settings
REDIS_INSIGHT_REDIS_HOSTS: str('local:dragonfly:6379', 'Redis hosts in format name:host:port,name:host:port'),
// Configuration
REDIS_INSIGHT_LOG_LEVEL: strWithChoices(['error', 'warn', 'info', 'verbose', 'debug'], 'info', 'Redis Insight log level'),
REDIS_INSIGHT_DISABLE_ANALYTICS: bool(true, 'Disable analytics collection'),
REDIS_INSIGHT_BUILD_TYPE: str('DOCKER', 'Build type identifier'),
});
// Export typed configuration objects
export type PgAdminConfig = typeof pgAdminConfig;
export type MongoExpressConfig = typeof mongoExpressConfig;
export type RedisInsightConfig = typeof redisInsightConfig;
// Export individual config values for convenience
export const {
PGADMIN_HOST,
PGADMIN_PORT,
PGADMIN_DEFAULT_EMAIL,
PGADMIN_DEFAULT_PASSWORD,
PGADMIN_SERVER_MODE,
PGADMIN_DISABLE_POSTFIX,
PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION,
PGADMIN_MASTER_PASSWORD_REQUIRED,
PGADMIN_SESSION_TIMEOUT,
} = pgAdminConfig;
export const {
MONGO_EXPRESS_HOST,
MONGO_EXPRESS_PORT,
MONGO_EXPRESS_MONGODB_SERVER,
MONGO_EXPRESS_MONGODB_PORT,
MONGO_EXPRESS_MONGODB_ADMINUSERNAME,
MONGO_EXPRESS_MONGODB_ADMINPASSWORD,
MONGO_EXPRESS_BASICAUTH_USERNAME,
MONGO_EXPRESS_BASICAUTH_PASSWORD,
MONGO_EXPRESS_ENABLE_ADMIN,
MONGO_EXPRESS_OPTIONS_EDITOR_THEME,
MONGO_EXPRESS_REQUEST_SIZE,
} = mongoExpressConfig;
export const {
REDIS_INSIGHT_HOST,
REDIS_INSIGHT_PORT,
REDIS_INSIGHT_REDIS_HOSTS,
REDIS_INSIGHT_LOG_LEVEL,
REDIS_INSIGHT_DISABLE_ANALYTICS,
REDIS_INSIGHT_BUILD_TYPE,
} = redisInsightConfig;
/**
* Admin interfaces configuration using Yup
* PgAdmin, Mongo Express, Redis Insight for database management
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, strWithChoices } = envValidators;
/**
* PgAdmin configuration with validation and defaults
*/
export const pgAdminConfig = cleanEnv(process.env, {
// PgAdmin Server
PGADMIN_HOST: str('localhost', 'PgAdmin host'),
PGADMIN_PORT: port(8080, 'PgAdmin port'),
// Authentication
PGADMIN_DEFAULT_EMAIL: str('admin@tradingbot.local', 'PgAdmin default admin email'),
PGADMIN_DEFAULT_PASSWORD: str('admin123', 'PgAdmin default admin password'),
// Configuration
PGADMIN_SERVER_MODE: bool(false, 'Enable server mode (multi-user)'),
PGADMIN_DISABLE_POSTFIX: bool(true, 'Disable postfix for email'),
PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION: bool(true, 'Enhanced cookie protection'),
// Security
PGADMIN_MASTER_PASSWORD_REQUIRED: bool(false, 'Require master password'),
PGADMIN_SESSION_TIMEOUT: str('60', 'Session timeout in minutes'),
});
/**
* Mongo Express configuration with validation and defaults
*/
export const mongoExpressConfig = cleanEnv(process.env, {
// Mongo Express Server
MONGO_EXPRESS_HOST: str('localhost', 'Mongo Express host'),
MONGO_EXPRESS_PORT: port(8081, 'Mongo Express port'),
// MongoDB Connection
MONGO_EXPRESS_MONGODB_SERVER: str('mongodb', 'MongoDB server name/host'),
MONGO_EXPRESS_MONGODB_PORT: port(27017, 'MongoDB port'),
MONGO_EXPRESS_MONGODB_ADMINUSERNAME: str('trading_admin', 'MongoDB admin username'),
MONGO_EXPRESS_MONGODB_ADMINPASSWORD: str('', 'MongoDB admin password'),
// Basic Authentication for Mongo Express
MONGO_EXPRESS_BASICAUTH_USERNAME: str('admin', 'Basic auth username for Mongo Express'),
MONGO_EXPRESS_BASICAUTH_PASSWORD: str('admin123', 'Basic auth password for Mongo Express'),
// Configuration
MONGO_EXPRESS_ENABLE_ADMIN: bool(true, 'Enable admin features'),
MONGO_EXPRESS_OPTIONS_EDITOR_THEME: str('rubyblue', 'Editor theme (rubyblue, 3024-night, etc.)'),
MONGO_EXPRESS_REQUEST_SIZE: str('100kb', 'Maximum request size'),
});
/**
* Redis Insight configuration with validation and defaults
*/
export const redisInsightConfig = cleanEnv(process.env, {
// Redis Insight Server
REDIS_INSIGHT_HOST: str('localhost', 'Redis Insight host'),
REDIS_INSIGHT_PORT: port(8001, 'Redis Insight port'),
// Redis Connection Settings
REDIS_INSIGHT_REDIS_HOSTS: str('local:dragonfly:6379', 'Redis hosts in format name:host:port,name:host:port'),
// Configuration
REDIS_INSIGHT_LOG_LEVEL: strWithChoices(['error', 'warn', 'info', 'verbose', 'debug'], 'info', 'Redis Insight log level'),
REDIS_INSIGHT_DISABLE_ANALYTICS: bool(true, 'Disable analytics collection'),
REDIS_INSIGHT_BUILD_TYPE: str('DOCKER', 'Build type identifier'),
});
// Export typed configuration objects
export type PgAdminConfig = typeof pgAdminConfig;
export type MongoExpressConfig = typeof mongoExpressConfig;
export type RedisInsightConfig = typeof redisInsightConfig;
// Export individual config values for convenience
export const {
PGADMIN_HOST,
PGADMIN_PORT,
PGADMIN_DEFAULT_EMAIL,
PGADMIN_DEFAULT_PASSWORD,
PGADMIN_SERVER_MODE,
PGADMIN_DISABLE_POSTFIX,
PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION,
PGADMIN_MASTER_PASSWORD_REQUIRED,
PGADMIN_SESSION_TIMEOUT,
} = pgAdminConfig;
export const {
MONGO_EXPRESS_HOST,
MONGO_EXPRESS_PORT,
MONGO_EXPRESS_MONGODB_SERVER,
MONGO_EXPRESS_MONGODB_PORT,
MONGO_EXPRESS_MONGODB_ADMINUSERNAME,
MONGO_EXPRESS_MONGODB_ADMINPASSWORD,
MONGO_EXPRESS_BASICAUTH_USERNAME,
MONGO_EXPRESS_BASICAUTH_PASSWORD,
MONGO_EXPRESS_ENABLE_ADMIN,
MONGO_EXPRESS_OPTIONS_EDITOR_THEME,
MONGO_EXPRESS_REQUEST_SIZE,
} = mongoExpressConfig;
export const {
REDIS_INSIGHT_HOST,
REDIS_INSIGHT_PORT,
REDIS_INSIGHT_REDIS_HOSTS,
REDIS_INSIGHT_LOG_LEVEL,
REDIS_INSIGHT_DISABLE_ANALYTICS,
REDIS_INSIGHT_BUILD_TYPE,
} = redisInsightConfig;

View file

@ -1,68 +1,68 @@
/**
* Core configuration module for the Stock Bot platform using Yup
*/
import { config as dotenvConfig } from 'dotenv';
import path from 'node:path';
/**
* Represents an error related to configuration validation
*/
export class ConfigurationError extends Error {
constructor(message: string) {
super(message);
this.name = 'ConfigurationError';
}
}
/**
* Environment types
*/
export enum Environment {
Development = 'development',
Testing = 'testing',
Staging = 'staging',
Production = 'production'
}
/**
* Loads environment variables from .env files based on the current environment
*/
export function loadEnvVariables(envOverride?: string): void {
const env = envOverride || process.env.NODE_ENV || 'development';
console.log(`Current environment: ${env}`);
// Order of loading:
// 1. .env (base environment variables)
// 2. .env.{environment} (environment-specific variables)
// 3. .env.local (local overrides, not to be committed)
const envFiles = [
'.env',
`.env.${env}`,
'.env.local'
];
for (const file of envFiles) {
dotenvConfig({ path: path.resolve(process.cwd(), file) });
}
}
/**
* Gets the current environment from process.env.NODE_ENV
*/
export function getEnvironment(): Environment {
const env = process.env.NODE_ENV?.toLowerCase() || 'development';
switch (env) {
case 'development':
return Environment.Development;
case 'testing':
case 'test': // Handle both 'test' and 'testing' for compatibility
return Environment.Testing;
case 'staging':
return Environment.Staging;
case 'production':
return Environment.Production;
default:
return Environment.Development;
}
}
/**
* Core configuration module for the Stock Bot platform using Yup
*/
import { config as dotenvConfig } from 'dotenv';
import path from 'node:path';
/**
* Represents an error related to configuration validation
*/
export class ConfigurationError extends Error {
constructor(message: string) {
super(message);
this.name = 'ConfigurationError';
}
}
/**
* Environment types
*/
export enum Environment {
Development = 'development',
Testing = 'testing',
Staging = 'staging',
Production = 'production'
}
/**
* Loads environment variables from .env files based on the current environment
*/
export function loadEnvVariables(envOverride?: string): void {
const env = envOverride || process.env.NODE_ENV || 'development';
console.log(`Current environment: ${env}`);
// Order of loading:
// 1. .env (base environment variables)
// 2. .env.{environment} (environment-specific variables)
// 3. .env.local (local overrides, not to be committed)
const envFiles = [
'.env',
`.env.${env}`,
'.env.local'
];
for (const file of envFiles) {
dotenvConfig({ path: path.resolve(process.cwd(), file) });
}
}
/**
* Gets the current environment from process.env.NODE_ENV
*/
export function getEnvironment(): Environment {
const env = process.env.NODE_ENV?.toLowerCase() || 'development';
switch (env) {
case 'development':
return Environment.Development;
case 'testing':
case 'test': // Handle both 'test' and 'testing' for compatibility
return Environment.Testing;
case 'staging':
return Environment.Staging;
case 'production':
return Environment.Production;
default:
return Environment.Development;
}
}

View file

@ -1,184 +1,184 @@
/**
* Data provider configurations using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, num, bool, strWithChoices } = envValidators;
export interface ProviderConfig {
name: string;
type: 'rest' | 'websocket';
enabled: boolean;
baseUrl?: string;
apiKey?: string;
apiSecret?: string;
rateLimits?: {
maxRequestsPerMinute?: number;
maxRequestsPerSecond?: number;
maxRequestsPerHour?: number;
};
}
/**
* Data providers configuration with validation and defaults
*/
export const dataProvidersConfig = cleanEnv(process.env, {
// Default Provider
DEFAULT_DATA_PROVIDER: strWithChoices(['alpaca', 'polygon', 'yahoo', 'iex'], 'alpaca', 'Default data provider'),
// Alpaca Configuration
ALPACA_API_KEY: str('', 'Alpaca API key'),
ALPACA_API_SECRET: str('', 'Alpaca API secret'),
ALPACA_BASE_URL: str('https://data.alpaca.markets/v1beta1', 'Alpaca base URL'),
ALPACA_RATE_LIMIT: num(200, 'Alpaca rate limit per minute'),
ALPACA_ENABLED: bool(true, 'Enable Alpaca provider'),
// Polygon Configuration
POLYGON_API_KEY: str('', 'Polygon API key'),
POLYGON_BASE_URL: str('https://api.polygon.io', 'Polygon base URL'),
POLYGON_RATE_LIMIT: num(5, 'Polygon rate limit per minute'),
POLYGON_ENABLED: bool(false, 'Enable Polygon provider'),
// Yahoo Finance Configuration
YAHOO_BASE_URL: str('https://query1.finance.yahoo.com', 'Yahoo Finance base URL'),
YAHOO_RATE_LIMIT: num(2000, 'Yahoo Finance rate limit per hour'),
YAHOO_ENABLED: bool(true, 'Enable Yahoo Finance provider'),
// IEX Cloud Configuration
IEX_API_KEY: str('', 'IEX Cloud API key'),
IEX_BASE_URL: str('https://cloud.iexapis.com/stable', 'IEX Cloud base URL'),
IEX_RATE_LIMIT: num(100, 'IEX Cloud rate limit per second'),
IEX_ENABLED: bool(false, 'Enable IEX Cloud provider'),
// Connection Settings
DATA_PROVIDER_TIMEOUT: num(30000, 'Request timeout in milliseconds'),
DATA_PROVIDER_RETRIES: num(3, 'Number of retry attempts'),
DATA_PROVIDER_RETRY_DELAY: num(1000, 'Retry delay in milliseconds'),
// Cache Settings
DATA_CACHE_ENABLED: bool(true, 'Enable data caching'),
DATA_CACHE_TTL: num(300000, 'Cache TTL in milliseconds'),
DATA_CACHE_MAX_SIZE: num(1000, 'Maximum cache entries'),
});
/**
* Helper function to get provider-specific configuration
*/
export function getProviderConfig(providerName: string) {
// make a interface for the provider config
const name = providerName.toUpperCase();
switch (name) {
case 'ALPACA':
return {
name: 'alpaca',
type: 'rest' as const,
enabled: dataProvidersConfig.ALPACA_ENABLED,
baseUrl: dataProvidersConfig.ALPACA_BASE_URL,
apiKey: dataProvidersConfig.ALPACA_API_KEY,
apiSecret: dataProvidersConfig.ALPACA_API_SECRET,
rateLimits: {
maxRequestsPerMinute: dataProvidersConfig.ALPACA_RATE_LIMIT
}
};
case 'POLYGON':
return {
name: 'polygon',
type: 'rest' as const,
enabled: dataProvidersConfig.POLYGON_ENABLED,
baseUrl: dataProvidersConfig.POLYGON_BASE_URL,
apiKey: dataProvidersConfig.POLYGON_API_KEY,
rateLimits: {
maxRequestsPerMinute: dataProvidersConfig.POLYGON_RATE_LIMIT
}
};
case 'YAHOO':
return {
name: 'yahoo',
type: 'rest' as const,
enabled: dataProvidersConfig.YAHOO_ENABLED,
baseUrl: dataProvidersConfig.YAHOO_BASE_URL,
rateLimits: {
maxRequestsPerHour: dataProvidersConfig.YAHOO_RATE_LIMIT
}
};
case 'IEX':
return {
name: 'iex',
type: 'rest' as const,
enabled: dataProvidersConfig.IEX_ENABLED,
baseUrl: dataProvidersConfig.IEX_BASE_URL,
apiKey: dataProvidersConfig.IEX_API_KEY,
rateLimits: {
maxRequestsPerSecond: dataProvidersConfig.IEX_RATE_LIMIT
}
};
default:
throw new Error(`Unknown provider: ${providerName}`);
}
}
/**
* Get all enabled providers
*/
export function getEnabledProviders() {
const providers = ['alpaca', 'polygon', 'yahoo', 'iex'];
return providers
.map(provider => getProviderConfig(provider))
.filter(config => config.enabled);
}
/**
* Get the default provider configuration
*/
export function getDefaultProvider() {
return getProviderConfig(dataProvidersConfig.DEFAULT_DATA_PROVIDER);
}
// Export typed configuration object
export type DataProvidersConfig = typeof dataProvidersConfig;
export class DataProviders {
static getProviderConfig(providerName: string): ProviderConfig {
return getProviderConfig(providerName);
}
static getEnabledProviders(): ProviderConfig[] {
return getEnabledProviders();
}
static getDefaultProvider(): ProviderConfig {
return getDefaultProvider();
}
}
// Export individual config values for convenience
export const {
DEFAULT_DATA_PROVIDER,
ALPACA_API_KEY,
ALPACA_API_SECRET,
ALPACA_BASE_URL,
ALPACA_RATE_LIMIT,
ALPACA_ENABLED,
POLYGON_API_KEY,
POLYGON_BASE_URL,
POLYGON_RATE_LIMIT,
POLYGON_ENABLED,
YAHOO_BASE_URL,
YAHOO_RATE_LIMIT,
YAHOO_ENABLED,
IEX_API_KEY,
IEX_BASE_URL,
IEX_RATE_LIMIT,
IEX_ENABLED,
DATA_PROVIDER_TIMEOUT,
DATA_PROVIDER_RETRIES,
DATA_PROVIDER_RETRY_DELAY,
DATA_CACHE_ENABLED,
DATA_CACHE_TTL,
DATA_CACHE_MAX_SIZE,
} = dataProvidersConfig;
/**
* Data provider configurations using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, num, bool, strWithChoices } = envValidators;
export interface ProviderConfig {
name: string;
type: 'rest' | 'websocket';
enabled: boolean;
baseUrl?: string;
apiKey?: string;
apiSecret?: string;
rateLimits?: {
maxRequestsPerMinute?: number;
maxRequestsPerSecond?: number;
maxRequestsPerHour?: number;
};
}
/**
* Data providers configuration with validation and defaults
*/
export const dataProvidersConfig = cleanEnv(process.env, {
// Default Provider
DEFAULT_DATA_PROVIDER: strWithChoices(['alpaca', 'polygon', 'yahoo', 'iex'], 'alpaca', 'Default data provider'),
// Alpaca Configuration
ALPACA_API_KEY: str('', 'Alpaca API key'),
ALPACA_API_SECRET: str('', 'Alpaca API secret'),
ALPACA_BASE_URL: str('https://data.alpaca.markets/v1beta1', 'Alpaca base URL'),
ALPACA_RATE_LIMIT: num(200, 'Alpaca rate limit per minute'),
ALPACA_ENABLED: bool(true, 'Enable Alpaca provider'),
// Polygon Configuration
POLYGON_API_KEY: str('', 'Polygon API key'),
POLYGON_BASE_URL: str('https://api.polygon.io', 'Polygon base URL'),
POLYGON_RATE_LIMIT: num(5, 'Polygon rate limit per minute'),
POLYGON_ENABLED: bool(false, 'Enable Polygon provider'),
// Yahoo Finance Configuration
YAHOO_BASE_URL: str('https://query1.finance.yahoo.com', 'Yahoo Finance base URL'),
YAHOO_RATE_LIMIT: num(2000, 'Yahoo Finance rate limit per hour'),
YAHOO_ENABLED: bool(true, 'Enable Yahoo Finance provider'),
// IEX Cloud Configuration
IEX_API_KEY: str('', 'IEX Cloud API key'),
IEX_BASE_URL: str('https://cloud.iexapis.com/stable', 'IEX Cloud base URL'),
IEX_RATE_LIMIT: num(100, 'IEX Cloud rate limit per second'),
IEX_ENABLED: bool(false, 'Enable IEX Cloud provider'),
// Connection Settings
DATA_PROVIDER_TIMEOUT: num(30000, 'Request timeout in milliseconds'),
DATA_PROVIDER_RETRIES: num(3, 'Number of retry attempts'),
DATA_PROVIDER_RETRY_DELAY: num(1000, 'Retry delay in milliseconds'),
// Cache Settings
DATA_CACHE_ENABLED: bool(true, 'Enable data caching'),
DATA_CACHE_TTL: num(300000, 'Cache TTL in milliseconds'),
DATA_CACHE_MAX_SIZE: num(1000, 'Maximum cache entries'),
});
/**
* Helper function to get provider-specific configuration
*/
export function getProviderConfig(providerName: string) {
// make a interface for the provider config
const name = providerName.toUpperCase();
switch (name) {
case 'ALPACA':
return {
name: 'alpaca',
type: 'rest' as const,
enabled: dataProvidersConfig.ALPACA_ENABLED,
baseUrl: dataProvidersConfig.ALPACA_BASE_URL,
apiKey: dataProvidersConfig.ALPACA_API_KEY,
apiSecret: dataProvidersConfig.ALPACA_API_SECRET,
rateLimits: {
maxRequestsPerMinute: dataProvidersConfig.ALPACA_RATE_LIMIT
}
};
case 'POLYGON':
return {
name: 'polygon',
type: 'rest' as const,
enabled: dataProvidersConfig.POLYGON_ENABLED,
baseUrl: dataProvidersConfig.POLYGON_BASE_URL,
apiKey: dataProvidersConfig.POLYGON_API_KEY,
rateLimits: {
maxRequestsPerMinute: dataProvidersConfig.POLYGON_RATE_LIMIT
}
};
case 'YAHOO':
return {
name: 'yahoo',
type: 'rest' as const,
enabled: dataProvidersConfig.YAHOO_ENABLED,
baseUrl: dataProvidersConfig.YAHOO_BASE_URL,
rateLimits: {
maxRequestsPerHour: dataProvidersConfig.YAHOO_RATE_LIMIT
}
};
case 'IEX':
return {
name: 'iex',
type: 'rest' as const,
enabled: dataProvidersConfig.IEX_ENABLED,
baseUrl: dataProvidersConfig.IEX_BASE_URL,
apiKey: dataProvidersConfig.IEX_API_KEY,
rateLimits: {
maxRequestsPerSecond: dataProvidersConfig.IEX_RATE_LIMIT
}
};
default:
throw new Error(`Unknown provider: ${providerName}`);
}
}
/**
* Get all enabled providers
*/
export function getEnabledProviders() {
const providers = ['alpaca', 'polygon', 'yahoo', 'iex'];
return providers
.map(provider => getProviderConfig(provider))
.filter(config => config.enabled);
}
/**
* Get the default provider configuration
*/
export function getDefaultProvider() {
return getProviderConfig(dataProvidersConfig.DEFAULT_DATA_PROVIDER);
}
// Export typed configuration object
export type DataProvidersConfig = typeof dataProvidersConfig;
export class DataProviders {
static getProviderConfig(providerName: string): ProviderConfig {
return getProviderConfig(providerName);
}
static getEnabledProviders(): ProviderConfig[] {
return getEnabledProviders();
}
static getDefaultProvider(): ProviderConfig {
return getDefaultProvider();
}
}
// Export individual config values for convenience
export const {
DEFAULT_DATA_PROVIDER,
ALPACA_API_KEY,
ALPACA_API_SECRET,
ALPACA_BASE_URL,
ALPACA_RATE_LIMIT,
ALPACA_ENABLED,
POLYGON_API_KEY,
POLYGON_BASE_URL,
POLYGON_RATE_LIMIT,
POLYGON_ENABLED,
YAHOO_BASE_URL,
YAHOO_RATE_LIMIT,
YAHOO_ENABLED,
IEX_API_KEY,
IEX_BASE_URL,
IEX_RATE_LIMIT,
IEX_ENABLED,
DATA_PROVIDER_TIMEOUT,
DATA_PROVIDER_RETRIES,
DATA_PROVIDER_RETRY_DELAY,
DATA_CACHE_ENABLED,
DATA_CACHE_TTL,
DATA_CACHE_MAX_SIZE,
} = dataProvidersConfig;

View file

@ -1,56 +1,56 @@
/**
* Database configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, num, bool } = envValidators;
/**
* Database configuration with validation and defaults
*/
export const databaseConfig = cleanEnv(process.env, {
// PostgreSQL Configuration
DB_HOST: str('localhost', 'Database host'),
DB_PORT: port(5432, 'Database port'),
DB_NAME: str('stockbot', 'Database name'),
DB_USER: str('stockbot', 'Database user'),
DB_PASSWORD: str('', 'Database password'),
// Connection Pool Settings
DB_POOL_MIN: num(2, 'Minimum pool connections'),
DB_POOL_MAX: num(10, 'Maximum pool connections'),
DB_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'),
// SSL Configuration
DB_SSL: bool(false, 'Enable SSL for database connection'),
DB_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'),
// Additional Settings
DB_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'),
DB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
DB_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'),
DB_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'),
DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'),
});
// Export typed configuration object
export type DatabaseConfig = typeof databaseConfig;
// Export individual config values for convenience
export const {
DB_HOST,
DB_PORT,
DB_NAME,
DB_USER,
DB_PASSWORD,
DB_POOL_MIN,
DB_POOL_MAX,
DB_POOL_IDLE_TIMEOUT,
DB_SSL,
DB_SSL_REJECT_UNAUTHORIZED,
DB_QUERY_TIMEOUT,
DB_CONNECTION_TIMEOUT,
DB_STATEMENT_TIMEOUT,
DB_LOCK_TIMEOUT,
DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
} = databaseConfig;
/**
* Database configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, num, bool } = envValidators;
/**
* Database configuration with validation and defaults
*/
export const databaseConfig = cleanEnv(process.env, {
// PostgreSQL Configuration
DB_HOST: str('localhost', 'Database host'),
DB_PORT: port(5432, 'Database port'),
DB_NAME: str('stockbot', 'Database name'),
DB_USER: str('stockbot', 'Database user'),
DB_PASSWORD: str('', 'Database password'),
// Connection Pool Settings
DB_POOL_MIN: num(2, 'Minimum pool connections'),
DB_POOL_MAX: num(10, 'Maximum pool connections'),
DB_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'),
// SSL Configuration
DB_SSL: bool(false, 'Enable SSL for database connection'),
DB_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'),
// Additional Settings
DB_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'),
DB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
DB_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'),
DB_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'),
DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'),
});
// Export typed configuration object
export type DatabaseConfig = typeof databaseConfig;
// Export individual config values for convenience
export const {
DB_HOST,
DB_PORT,
DB_NAME,
DB_USER,
DB_PASSWORD,
DB_POOL_MIN,
DB_POOL_MAX,
DB_POOL_IDLE_TIMEOUT,
DB_SSL,
DB_SSL_REJECT_UNAUTHORIZED,
DB_QUERY_TIMEOUT,
DB_CONNECTION_TIMEOUT,
DB_STATEMENT_TIMEOUT,
DB_LOCK_TIMEOUT,
DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
} = databaseConfig;

View file

@ -1,81 +1,81 @@
/**
* Dragonfly (Redis replacement) configuration using Yup
* High-performance caching and event streaming
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, num, bool } = envValidators;
/**
* Dragonfly configuration with validation and defaults
*/
export const dragonflyConfig = cleanEnv(process.env, {
// Dragonfly Connection
DRAGONFLY_HOST: str('localhost', 'Dragonfly host'),
DRAGONFLY_PORT: port(6379, 'Dragonfly port'),
DRAGONFLY_PASSWORD: str('', 'Dragonfly password (if auth enabled)'),
DRAGONFLY_USERNAME: str('', 'Dragonfly username (if ACL enabled)'),
// Database Selection
DRAGONFLY_DATABASE: num(0, 'Dragonfly database number (0-15)'),
// Connection Pool Settings
DRAGONFLY_MAX_RETRIES: num(3, 'Maximum retry attempts'),
DRAGONFLY_RETRY_DELAY: num(50, 'Retry delay in ms'),
DRAGONFLY_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'),
DRAGONFLY_COMMAND_TIMEOUT: num(5000, 'Command timeout in ms'),
// Pool Configuration
DRAGONFLY_POOL_SIZE: num(10, 'Connection pool size'),
DRAGONFLY_POOL_MIN: num(1, 'Minimum pool connections'),
DRAGONFLY_POOL_MAX: num(20, 'Maximum pool connections'),
// TLS Settings
DRAGONFLY_TLS: bool(false, 'Enable TLS for Dragonfly connection'),
DRAGONFLY_TLS_CERT_FILE: str('', 'Path to TLS certificate file'),
DRAGONFLY_TLS_KEY_FILE: str('', 'Path to TLS key file'),
DRAGONFLY_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'),
DRAGONFLY_TLS_SKIP_VERIFY: bool(false, 'Skip TLS certificate verification'),
// Performance Settings
DRAGONFLY_ENABLE_KEEPALIVE: bool(true, 'Enable TCP keepalive'),
DRAGONFLY_KEEPALIVE_INTERVAL: num(60, 'Keepalive interval in seconds'),
// Clustering (if using cluster mode)
DRAGONFLY_CLUSTER_MODE: bool(false, 'Enable cluster mode'),
DRAGONFLY_CLUSTER_NODES: str('', 'Comma-separated list of cluster nodes (host:port)'),
// Memory and Cache Settings
DRAGONFLY_MAX_MEMORY: str('2gb', 'Maximum memory usage'),
DRAGONFLY_CACHE_MODE: bool(true, 'Enable cache mode'),
});
// Export typed configuration object
export type DragonflyConfig = typeof dragonflyConfig;
// Export individual config values for convenience
export const {
DRAGONFLY_HOST,
DRAGONFLY_PORT,
DRAGONFLY_PASSWORD,
DRAGONFLY_USERNAME,
DRAGONFLY_DATABASE,
DRAGONFLY_MAX_RETRIES,
DRAGONFLY_RETRY_DELAY,
DRAGONFLY_CONNECT_TIMEOUT,
DRAGONFLY_COMMAND_TIMEOUT,
DRAGONFLY_POOL_SIZE,
DRAGONFLY_POOL_MIN,
DRAGONFLY_POOL_MAX,
DRAGONFLY_TLS,
DRAGONFLY_TLS_CERT_FILE,
DRAGONFLY_TLS_KEY_FILE,
DRAGONFLY_TLS_CA_FILE,
DRAGONFLY_TLS_SKIP_VERIFY,
DRAGONFLY_ENABLE_KEEPALIVE,
DRAGONFLY_KEEPALIVE_INTERVAL,
DRAGONFLY_CLUSTER_MODE,
DRAGONFLY_CLUSTER_NODES,
DRAGONFLY_MAX_MEMORY,
DRAGONFLY_CACHE_MODE,
} = dragonflyConfig;
/**
* Dragonfly (Redis replacement) configuration using Yup
* High-performance caching and event streaming
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, num, bool } = envValidators;
/**
* Dragonfly configuration with validation and defaults
*/
export const dragonflyConfig = cleanEnv(process.env, {
// Dragonfly Connection
DRAGONFLY_HOST: str('localhost', 'Dragonfly host'),
DRAGONFLY_PORT: port(6379, 'Dragonfly port'),
DRAGONFLY_PASSWORD: str('', 'Dragonfly password (if auth enabled)'),
DRAGONFLY_USERNAME: str('', 'Dragonfly username (if ACL enabled)'),
// Database Selection
DRAGONFLY_DATABASE: num(0, 'Dragonfly database number (0-15)'),
// Connection Pool Settings
DRAGONFLY_MAX_RETRIES: num(3, 'Maximum retry attempts'),
DRAGONFLY_RETRY_DELAY: num(50, 'Retry delay in ms'),
DRAGONFLY_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'),
DRAGONFLY_COMMAND_TIMEOUT: num(5000, 'Command timeout in ms'),
// Pool Configuration
DRAGONFLY_POOL_SIZE: num(10, 'Connection pool size'),
DRAGONFLY_POOL_MIN: num(1, 'Minimum pool connections'),
DRAGONFLY_POOL_MAX: num(20, 'Maximum pool connections'),
// TLS Settings
DRAGONFLY_TLS: bool(false, 'Enable TLS for Dragonfly connection'),
DRAGONFLY_TLS_CERT_FILE: str('', 'Path to TLS certificate file'),
DRAGONFLY_TLS_KEY_FILE: str('', 'Path to TLS key file'),
DRAGONFLY_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'),
DRAGONFLY_TLS_SKIP_VERIFY: bool(false, 'Skip TLS certificate verification'),
// Performance Settings
DRAGONFLY_ENABLE_KEEPALIVE: bool(true, 'Enable TCP keepalive'),
DRAGONFLY_KEEPALIVE_INTERVAL: num(60, 'Keepalive interval in seconds'),
// Clustering (if using cluster mode)
DRAGONFLY_CLUSTER_MODE: bool(false, 'Enable cluster mode'),
DRAGONFLY_CLUSTER_NODES: str('', 'Comma-separated list of cluster nodes (host:port)'),
// Memory and Cache Settings
DRAGONFLY_MAX_MEMORY: str('2gb', 'Maximum memory usage'),
DRAGONFLY_CACHE_MODE: bool(true, 'Enable cache mode'),
});
// Export typed configuration object
export type DragonflyConfig = typeof dragonflyConfig;
// Export individual config values for convenience
export const {
DRAGONFLY_HOST,
DRAGONFLY_PORT,
DRAGONFLY_PASSWORD,
DRAGONFLY_USERNAME,
DRAGONFLY_DATABASE,
DRAGONFLY_MAX_RETRIES,
DRAGONFLY_RETRY_DELAY,
DRAGONFLY_CONNECT_TIMEOUT,
DRAGONFLY_COMMAND_TIMEOUT,
DRAGONFLY_POOL_SIZE,
DRAGONFLY_POOL_MIN,
DRAGONFLY_POOL_MAX,
DRAGONFLY_TLS,
DRAGONFLY_TLS_CERT_FILE,
DRAGONFLY_TLS_KEY_FILE,
DRAGONFLY_TLS_CA_FILE,
DRAGONFLY_TLS_SKIP_VERIFY,
DRAGONFLY_ENABLE_KEEPALIVE,
DRAGONFLY_KEEPALIVE_INTERVAL,
DRAGONFLY_CLUSTER_MODE,
DRAGONFLY_CLUSTER_NODES,
DRAGONFLY_MAX_MEMORY,
DRAGONFLY_CACHE_MODE,
} = dragonflyConfig;

View file

@ -1,162 +1,162 @@
/**
* Environment validation utilities using Yup
*/
import * as yup from 'yup';
import { config } from 'dotenv';
import { join } from 'path';
import { existsSync } from 'fs';
// Function to find and load environment variables
function loadEnvFiles() {
const cwd = process.cwd();
const possiblePaths = [
// Current working directory
join(cwd, '.env'),
join(cwd, '.env.local'),
// Root of the workspace (common pattern)
join(cwd, '../../.env'),
join(cwd, '../../../.env'),
// Config library directory
join(__dirname, '../.env'),
join(__dirname, '../../.env'),
join(__dirname, '../../../.env'),
];
// Try to load each possible .env file
for (const envPath of possiblePaths) {
if (existsSync(envPath)) {
console.log(`📄 Loading environment from: ${envPath}`);
config({ path: envPath });
break; // Use the first .env file found
}
}
// Also try to load environment-specific files
const environment = process.env.NODE_ENV || 'development';
const envSpecificPaths = [
join(cwd, `.env.${environment}`),
join(cwd, `.env.${environment}.local`),
];
for (const envPath of envSpecificPaths) {
if (existsSync(envPath)) {
console.log(`📄 Loading ${environment} environment from: ${envPath}`);
config({ path: envPath, override: false }); // Don't override existing vars
}
}
}
// Load environment variables
loadEnvFiles();
/**
* Creates a Yup schema for environment variable validation
*/
export function createEnvSchema(shape: Record<string, any>) {
return yup.object(shape);
}
/**
* Validates environment variables against a Yup schema
*/
export function validateEnv(
schema: yup.ObjectSchema<any>,
env = process.env
): any {
try {
const result = schema.validateSync(env, { abortEarly: false });
return result;
} catch (error) {
if (error instanceof yup.ValidationError) {
console.error('❌ Invalid environment variables:');
error.inner.forEach((err) => {
console.error(` ${err.path}: ${err.message}`);
});
}
throw new Error('Environment validation failed');
}
}
/**
* Manually load environment variables from a specific path
*/
export function loadEnv(path?: string) {
if (path) {
console.log(`📄 Manually loading environment from: ${path}`);
config({ path });
} else {
loadEnvFiles();
}
}
/**
* Helper functions for common validation patterns
*/
export const envValidators = {
// String with default
str: (defaultValue?: string, description?: string) =>
yup.string().default(defaultValue || ''),
// String with choices (enum)
strWithChoices: (choices: string[], defaultValue?: string, description?: string) =>
yup.string().oneOf(choices).default(defaultValue || choices[0]),
// Required string
requiredStr: (description?: string) =>
yup.string().required('Required'),
// Port number
port: (defaultValue?: number, description?: string) =>
yup.number()
.integer()
.min(1)
.max(65535)
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return parseInt(originalVal, 10);
}
return val;
})
.default(defaultValue || 3000),
// Number with default
num: (defaultValue?: number, description?: string) =>
yup.number()
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return parseFloat(originalVal);
}
return val;
})
.default(defaultValue || 0),
// Boolean with default
bool: (defaultValue?: boolean, description?: string) =>
yup.boolean()
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return originalVal === 'true' || originalVal === '1';
}
return val;
})
.default(defaultValue || false),
// URL validation
url: (defaultValue?: string, description?: string) =>
yup.string().url().default(defaultValue || 'http://localhost'),
// Email validation
email: (description?: string) =>
yup.string().email(),
};
/**
* Legacy compatibility - creates a cleanEnv-like function
*/
export function cleanEnv(
env: Record<string, string | undefined>,
validators: Record<string, any>
): any {
const schema = createEnvSchema(validators);
return validateEnv(schema, env);
}
/**
* Environment validation utilities using Yup
*/
import * as yup from 'yup';
import { config } from 'dotenv';
import { join } from 'path';
import { existsSync } from 'fs';
// Function to find and load environment variables
function loadEnvFiles() {
const cwd = process.cwd();
const possiblePaths = [
// Current working directory
join(cwd, '.env'),
join(cwd, '.env.local'),
// Root of the workspace (common pattern)
join(cwd, '../../.env'),
join(cwd, '../../../.env'),
// Config library directory
join(__dirname, '../.env'),
join(__dirname, '../../.env'),
join(__dirname, '../../../.env'),
];
// Try to load each possible .env file
for (const envPath of possiblePaths) {
if (existsSync(envPath)) {
console.log(`📄 Loading environment from: ${envPath}`);
config({ path: envPath });
break; // Use the first .env file found
}
}
// Also try to load environment-specific files
const environment = process.env.NODE_ENV || 'development';
const envSpecificPaths = [
join(cwd, `.env.${environment}`),
join(cwd, `.env.${environment}.local`),
];
for (const envPath of envSpecificPaths) {
if (existsSync(envPath)) {
console.log(`📄 Loading ${environment} environment from: ${envPath}`);
config({ path: envPath, override: false }); // Don't override existing vars
}
}
}
// Load environment variables
loadEnvFiles();
/**
* Creates a Yup schema for environment variable validation
*/
export function createEnvSchema(shape: Record<string, any>) {
return yup.object(shape);
}
/**
* Validates environment variables against a Yup schema
*/
export function validateEnv(
schema: yup.ObjectSchema<any>,
env = process.env
): any {
try {
const result = schema.validateSync(env, { abortEarly: false });
return result;
} catch (error) {
if (error instanceof yup.ValidationError) {
console.error('❌ Invalid environment variables:');
error.inner.forEach((err) => {
console.error(` ${err.path}: ${err.message}`);
});
}
throw new Error('Environment validation failed');
}
}
/**
* Manually load environment variables from a specific path
*/
export function loadEnv(path?: string) {
if (path) {
console.log(`📄 Manually loading environment from: ${path}`);
config({ path });
} else {
loadEnvFiles();
}
}
/**
* Helper functions for common validation patterns
*/
export const envValidators = {
// String with default
str: (defaultValue?: string, description?: string) =>
yup.string().default(defaultValue || ''),
// String with choices (enum)
strWithChoices: (choices: string[], defaultValue?: string, description?: string) =>
yup.string().oneOf(choices).default(defaultValue || choices[0]),
// Required string
requiredStr: (description?: string) =>
yup.string().required('Required'),
// Port number
port: (defaultValue?: number, description?: string) =>
yup.number()
.integer()
.min(1)
.max(65535)
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return parseInt(originalVal, 10);
}
return val;
})
.default(defaultValue || 3000),
// Number with default
num: (defaultValue?: number, description?: string) =>
yup.number()
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return parseFloat(originalVal);
}
return val;
})
.default(defaultValue || 0),
// Boolean with default
bool: (defaultValue?: boolean, description?: string) =>
yup.boolean()
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return originalVal === 'true' || originalVal === '1';
}
return val;
})
.default(defaultValue || false),
// URL validation
url: (defaultValue?: string, description?: string) =>
yup.string().url().default(defaultValue || 'http://localhost'),
// Email validation
email: (description?: string) =>
yup.string().email(),
};
/**
* Legacy compatibility - creates a cleanEnv-like function
*/
export function cleanEnv(
env: Record<string, string | undefined>,
validators: Record<string, any>
): any {
const schema = createEnvSchema(validators);
return validateEnv(schema, env);
}

View file

@ -1,20 +1,20 @@
/**
* @stock-bot/config
*
* Configuration management library for Stock Bot platform using Yup
*/
// Re-export everything from all modules
export * from './env-utils';
export * from './core';
export * from './admin-interfaces';
export * from './database';
export * from './dragonfly';
export * from './postgres';
export * from './questdb';
export * from './mongodb';
export * from './logging';
export * from './loki';
export * from './monitoring';
export * from './data-providers';
export * from './risk';
/**
* @stock-bot/config
*
* Configuration management library for Stock Bot platform using Yup
*/
// Re-export everything from all modules
export * from './env-utils';
export * from './core';
export * from './admin-interfaces';
export * from './database';
export * from './dragonfly';
export * from './postgres';
export * from './questdb';
export * from './mongodb';
export * from './logging';
export * from './loki';
export * from './monitoring';
export * from './data-providers';
export * from './risk';

View file

@ -1,74 +1,74 @@
/**
* Logging configuration using Yup
* Application logging settings without Loki (Loki config is in monitoring.ts)
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, bool, num, strWithChoices } = envValidators;
/**
* Logging configuration with validation and defaults
*/
export const loggingConfig = cleanEnv(process.env, {
// Basic Logging Settings
LOG_LEVEL: strWithChoices(['debug', 'info', 'warn', 'error'], 'info', 'Logging level'),
LOG_FORMAT: strWithChoices(['json', 'simple', 'combined'], 'json', 'Log output format'),
LOG_CONSOLE: bool(true, 'Enable console logging'),
LOG_FILE: bool(false, 'Enable file logging'),
// File Logging Settings
LOG_FILE_PATH: str('logs', 'Log file directory path'),
LOG_FILE_MAX_SIZE: str('20m', 'Maximum log file size'),
LOG_FILE_MAX_FILES: num(14, 'Maximum number of log files to keep'),
LOG_FILE_DATE_PATTERN: str('YYYY-MM-DD', 'Log file date pattern'),
// Error Logging
LOG_ERROR_FILE: bool(true, 'Enable separate error log file'),
LOG_ERROR_STACK: bool(true, 'Include stack traces in error logs'),
// Performance Logging
LOG_PERFORMANCE: bool(false, 'Enable performance logging'),
LOG_SQL_QUERIES: bool(false, 'Log SQL queries'),
LOG_HTTP_REQUESTS: bool(true, 'Log HTTP requests'),
// Structured Logging
LOG_STRUCTURED: bool(true, 'Use structured logging format'),
LOG_TIMESTAMP: bool(true, 'Include timestamps in logs'),
LOG_CALLER_INFO: bool(false, 'Include caller information in logs'),
// Log Filtering
LOG_SILENT_MODULES: str('', 'Comma-separated list of modules to silence'),
LOG_VERBOSE_MODULES: str('', 'Comma-separated list of modules for verbose logging'),
// Application Context
LOG_SERVICE_NAME: str('stock-bot', 'Service name for log context'),
LOG_SERVICE_VERSION: str('1.0.0', 'Service version for log context'),
LOG_ENVIRONMENT: str('development', 'Environment for log context'),
});
// Export typed configuration object
export type LoggingConfig = typeof loggingConfig;
// Export individual config values for convenience
export const {
LOG_LEVEL,
LOG_FORMAT,
LOG_CONSOLE,
LOG_FILE,
LOG_FILE_PATH,
LOG_FILE_MAX_SIZE,
LOG_FILE_MAX_FILES,
LOG_FILE_DATE_PATTERN,
LOG_ERROR_FILE,
LOG_ERROR_STACK,
LOG_PERFORMANCE,
LOG_SQL_QUERIES,
LOG_HTTP_REQUESTS,
LOG_STRUCTURED,
LOG_TIMESTAMP,
LOG_CALLER_INFO,
LOG_SILENT_MODULES,
LOG_VERBOSE_MODULES,
LOG_SERVICE_NAME,
LOG_SERVICE_VERSION,
LOG_ENVIRONMENT,
} = loggingConfig;
/**
* Logging configuration using Yup
* Application logging settings without Loki (Loki config is in monitoring.ts)
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, bool, num, strWithChoices } = envValidators;
/**
* Logging configuration with validation and defaults
*/
export const loggingConfig = cleanEnv(process.env, {
// Basic Logging Settings
LOG_LEVEL: strWithChoices(['debug', 'info', 'warn', 'error'], 'info', 'Logging level'),
LOG_FORMAT: strWithChoices(['json', 'simple', 'combined'], 'json', 'Log output format'),
LOG_CONSOLE: bool(true, 'Enable console logging'),
LOG_FILE: bool(false, 'Enable file logging'),
// File Logging Settings
LOG_FILE_PATH: str('logs', 'Log file directory path'),
LOG_FILE_MAX_SIZE: str('20m', 'Maximum log file size'),
LOG_FILE_MAX_FILES: num(14, 'Maximum number of log files to keep'),
LOG_FILE_DATE_PATTERN: str('YYYY-MM-DD', 'Log file date pattern'),
// Error Logging
LOG_ERROR_FILE: bool(true, 'Enable separate error log file'),
LOG_ERROR_STACK: bool(true, 'Include stack traces in error logs'),
// Performance Logging
LOG_PERFORMANCE: bool(false, 'Enable performance logging'),
LOG_SQL_QUERIES: bool(false, 'Log SQL queries'),
LOG_HTTP_REQUESTS: bool(true, 'Log HTTP requests'),
// Structured Logging
LOG_STRUCTURED: bool(true, 'Use structured logging format'),
LOG_TIMESTAMP: bool(true, 'Include timestamps in logs'),
LOG_CALLER_INFO: bool(false, 'Include caller information in logs'),
// Log Filtering
LOG_SILENT_MODULES: str('', 'Comma-separated list of modules to silence'),
LOG_VERBOSE_MODULES: str('', 'Comma-separated list of modules for verbose logging'),
// Application Context
LOG_SERVICE_NAME: str('stock-bot', 'Service name for log context'),
LOG_SERVICE_VERSION: str('1.0.0', 'Service version for log context'),
LOG_ENVIRONMENT: str('development', 'Environment for log context'),
});
// Export typed configuration object
export type LoggingConfig = typeof loggingConfig;
// Export individual config values for convenience
export const {
LOG_LEVEL,
LOG_FORMAT,
LOG_CONSOLE,
LOG_FILE,
LOG_FILE_PATH,
LOG_FILE_MAX_SIZE,
LOG_FILE_MAX_FILES,
LOG_FILE_DATE_PATTERN,
LOG_ERROR_FILE,
LOG_ERROR_STACK,
LOG_PERFORMANCE,
LOG_SQL_QUERIES,
LOG_HTTP_REQUESTS,
LOG_STRUCTURED,
LOG_TIMESTAMP,
LOG_CALLER_INFO,
LOG_SILENT_MODULES,
LOG_VERBOSE_MODULES,
LOG_SERVICE_NAME,
LOG_SERVICE_VERSION,
LOG_ENVIRONMENT,
} = loggingConfig;

View file

@ -1,63 +1,63 @@
/**
* Loki log aggregation configuration using Yup
* Centralized logging configuration for the Stock Bot platform
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* Loki configuration with validation and defaults
*/
export const lokiConfig = cleanEnv(process.env, {
// Loki Server
LOKI_HOST: str('localhost', 'Loki host'),
LOKI_PORT: port(3100, 'Loki port'),
LOKI_URL: str('', 'Complete Loki URL (overrides host/port)'),
// Authentication
LOKI_USERNAME: str('', 'Loki username (if auth enabled)'),
LOKI_PASSWORD: str('', 'Loki password (if auth enabled)'),
LOKI_TENANT_ID: str('', 'Loki tenant ID (for multi-tenancy)'),
// Push Configuration
LOKI_PUSH_TIMEOUT: num(10000, 'Push timeout in ms'),
LOKI_BATCH_SIZE: num(1024, 'Batch size for log entries'),
LOKI_BATCH_WAIT: num(5, 'Batch wait time in ms'),
// Retention Settings
LOKI_RETENTION_PERIOD: str('30d', 'Log retention period'),
LOKI_MAX_CHUNK_AGE: str('1h', 'Maximum chunk age'),
// TLS Settings
LOKI_TLS_ENABLED: bool(false, 'Enable TLS for Loki'),
LOKI_TLS_INSECURE: bool(false, 'Skip TLS verification'),
// Log Labels
LOKI_DEFAULT_LABELS: str('', 'Default labels for all log entries (JSON format)'),
LOKI_SERVICE_LABEL: str('stock-bot', 'Service label for log entries'),
LOKI_ENVIRONMENT_LABEL: str('development', 'Environment label for log entries'),
});
// Export typed configuration object
export type LokiConfig = typeof lokiConfig;
// Export individual config values for convenience
export const {
LOKI_HOST,
LOKI_PORT,
LOKI_URL,
LOKI_USERNAME,
LOKI_PASSWORD,
LOKI_TENANT_ID,
LOKI_PUSH_TIMEOUT,
LOKI_BATCH_SIZE,
LOKI_BATCH_WAIT,
LOKI_RETENTION_PERIOD,
LOKI_MAX_CHUNK_AGE,
LOKI_TLS_ENABLED,
LOKI_TLS_INSECURE,
LOKI_DEFAULT_LABELS,
LOKI_SERVICE_LABEL,
LOKI_ENVIRONMENT_LABEL,
} = lokiConfig;
/**
* Loki log aggregation configuration using Yup
* Centralized logging configuration for the Stock Bot platform
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* Loki configuration with validation and defaults
*/
export const lokiConfig = cleanEnv(process.env, {
// Loki Server
LOKI_HOST: str('localhost', 'Loki host'),
LOKI_PORT: port(3100, 'Loki port'),
LOKI_URL: str('', 'Complete Loki URL (overrides host/port)'),
// Authentication
LOKI_USERNAME: str('', 'Loki username (if auth enabled)'),
LOKI_PASSWORD: str('', 'Loki password (if auth enabled)'),
LOKI_TENANT_ID: str('', 'Loki tenant ID (for multi-tenancy)'),
// Push Configuration
LOKI_PUSH_TIMEOUT: num(10000, 'Push timeout in ms'),
LOKI_BATCH_SIZE: num(1024, 'Batch size for log entries'),
LOKI_BATCH_WAIT: num(5, 'Batch wait time in ms'),
// Retention Settings
LOKI_RETENTION_PERIOD: str('30d', 'Log retention period'),
LOKI_MAX_CHUNK_AGE: str('1h', 'Maximum chunk age'),
// TLS Settings
LOKI_TLS_ENABLED: bool(false, 'Enable TLS for Loki'),
LOKI_TLS_INSECURE: bool(false, 'Skip TLS verification'),
// Log Labels
LOKI_DEFAULT_LABELS: str('', 'Default labels for all log entries (JSON format)'),
LOKI_SERVICE_LABEL: str('stock-bot', 'Service label for log entries'),
LOKI_ENVIRONMENT_LABEL: str('development', 'Environment label for log entries'),
});
// Export typed configuration object
export type LokiConfig = typeof lokiConfig;
// Export individual config values for convenience
export const {
LOKI_HOST,
LOKI_PORT,
LOKI_URL,
LOKI_USERNAME,
LOKI_PASSWORD,
LOKI_TENANT_ID,
LOKI_PUSH_TIMEOUT,
LOKI_BATCH_SIZE,
LOKI_BATCH_WAIT,
LOKI_RETENTION_PERIOD,
LOKI_MAX_CHUNK_AGE,
LOKI_TLS_ENABLED,
LOKI_TLS_INSECURE,
LOKI_DEFAULT_LABELS,
LOKI_SERVICE_LABEL,
LOKI_ENVIRONMENT_LABEL,
} = lokiConfig;

View file

@ -1,73 +1,73 @@
/**
* MongoDB configuration using Yup
* Document storage for sentiment data, raw documents, and unstructured data
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num, strWithChoices } = envValidators;
/**
* MongoDB configuration with validation and defaults
*/
export const mongodbConfig = cleanEnv(process.env, {
// MongoDB Connection
MONGODB_HOST: str('localhost', 'MongoDB host'),
MONGODB_PORT: port(27017, 'MongoDB port'),
MONGODB_DATABASE: str('trading_documents', 'MongoDB database name'),
// Authentication
MONGODB_USERNAME: str('trading_admin', 'MongoDB username'),
MONGODB_PASSWORD: str('', 'MongoDB password'),
MONGODB_AUTH_SOURCE: str('admin', 'MongoDB authentication database'),
// Connection URI (alternative to individual settings)
MONGODB_URI: str('', 'Complete MongoDB connection URI (overrides individual settings)'),
// Connection Pool Settings
MONGODB_MAX_POOL_SIZE: num(10, 'Maximum connection pool size'),
MONGODB_MIN_POOL_SIZE: num(0, 'Minimum connection pool size'),
MONGODB_MAX_IDLE_TIME: num(30000, 'Maximum idle time for connections in ms'),
// Timeouts
MONGODB_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'),
MONGODB_SOCKET_TIMEOUT: num(30000, 'Socket timeout in ms'),
MONGODB_SERVER_SELECTION_TIMEOUT: num(5000, 'Server selection timeout in ms'),
// SSL/TLS Settings
MONGODB_TLS: bool(false, 'Enable TLS for MongoDB connection'),
MONGODB_TLS_INSECURE: bool(false, 'Allow invalid certificates in TLS mode'),
MONGODB_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'),
// Additional Settings
MONGODB_RETRY_WRITES: bool(true, 'Enable retryable writes'),
MONGODB_JOURNAL: bool(true, 'Enable write concern journal'),
MONGODB_READ_PREFERENCE: strWithChoices(['primary', 'primaryPreferred', 'secondary', 'secondaryPreferred', 'nearest'], 'primary', 'MongoDB read preference'),
MONGODB_WRITE_CONCERN: str('majority', 'Write concern level'),
});
// Export typed configuration object
export type MongoDbConfig = typeof mongodbConfig;
// Export individual config values for convenience
export const {
MONGODB_HOST,
MONGODB_PORT,
MONGODB_DATABASE,
MONGODB_USERNAME,
MONGODB_PASSWORD,
MONGODB_AUTH_SOURCE,
MONGODB_URI,
MONGODB_MAX_POOL_SIZE,
MONGODB_MIN_POOL_SIZE,
MONGODB_MAX_IDLE_TIME,
MONGODB_CONNECT_TIMEOUT,
MONGODB_SOCKET_TIMEOUT,
MONGODB_SERVER_SELECTION_TIMEOUT,
MONGODB_TLS,
MONGODB_TLS_INSECURE,
MONGODB_TLS_CA_FILE,
MONGODB_RETRY_WRITES,
MONGODB_JOURNAL,
MONGODB_READ_PREFERENCE,
MONGODB_WRITE_CONCERN,
} = mongodbConfig;
/**
* MongoDB configuration using Yup
* Document storage for sentiment data, raw documents, and unstructured data
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num, strWithChoices } = envValidators;
/**
* MongoDB configuration with validation and defaults
*/
export const mongodbConfig = cleanEnv(process.env, {
// MongoDB Connection
MONGODB_HOST: str('localhost', 'MongoDB host'),
MONGODB_PORT: port(27017, 'MongoDB port'),
MONGODB_DATABASE: str('trading_documents', 'MongoDB database name'),
// Authentication
MONGODB_USERNAME: str('trading_admin', 'MongoDB username'),
MONGODB_PASSWORD: str('', 'MongoDB password'),
MONGODB_AUTH_SOURCE: str('admin', 'MongoDB authentication database'),
// Connection URI (alternative to individual settings)
MONGODB_URI: str('', 'Complete MongoDB connection URI (overrides individual settings)'),
// Connection Pool Settings
MONGODB_MAX_POOL_SIZE: num(10, 'Maximum connection pool size'),
MONGODB_MIN_POOL_SIZE: num(0, 'Minimum connection pool size'),
MONGODB_MAX_IDLE_TIME: num(30000, 'Maximum idle time for connections in ms'),
// Timeouts
MONGODB_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'),
MONGODB_SOCKET_TIMEOUT: num(30000, 'Socket timeout in ms'),
MONGODB_SERVER_SELECTION_TIMEOUT: num(5000, 'Server selection timeout in ms'),
// SSL/TLS Settings
MONGODB_TLS: bool(false, 'Enable TLS for MongoDB connection'),
MONGODB_TLS_INSECURE: bool(false, 'Allow invalid certificates in TLS mode'),
MONGODB_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'),
// Additional Settings
MONGODB_RETRY_WRITES: bool(true, 'Enable retryable writes'),
MONGODB_JOURNAL: bool(true, 'Enable write concern journal'),
MONGODB_READ_PREFERENCE: strWithChoices(['primary', 'primaryPreferred', 'secondary', 'secondaryPreferred', 'nearest'], 'primary', 'MongoDB read preference'),
MONGODB_WRITE_CONCERN: str('majority', 'Write concern level'),
});
// Export typed configuration object
export type MongoDbConfig = typeof mongodbConfig;
// Export individual config values for convenience
export const {
MONGODB_HOST,
MONGODB_PORT,
MONGODB_DATABASE,
MONGODB_USERNAME,
MONGODB_PASSWORD,
MONGODB_AUTH_SOURCE,
MONGODB_URI,
MONGODB_MAX_POOL_SIZE,
MONGODB_MIN_POOL_SIZE,
MONGODB_MAX_IDLE_TIME,
MONGODB_CONNECT_TIMEOUT,
MONGODB_SOCKET_TIMEOUT,
MONGODB_SERVER_SELECTION_TIMEOUT,
MONGODB_TLS,
MONGODB_TLS_INSECURE,
MONGODB_TLS_CA_FILE,
MONGODB_RETRY_WRITES,
MONGODB_JOURNAL,
MONGODB_READ_PREFERENCE,
MONGODB_WRITE_CONCERN,
} = mongodbConfig;

View file

@ -1,88 +1,88 @@
/**
* Monitoring configuration using Yup
* Prometheus metrics, Grafana visualization, and Loki logging
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num, strWithChoices } = envValidators;
/**
* Prometheus configuration with validation and defaults
*/
export const prometheusConfig = cleanEnv(process.env, {
// Prometheus Server
PROMETHEUS_HOST: str('localhost', 'Prometheus host'),
PROMETHEUS_PORT: port(9090, 'Prometheus port'),
PROMETHEUS_URL: str('', 'Complete Prometheus URL (overrides host/port)'),
// Authentication
PROMETHEUS_USERNAME: str('', 'Prometheus username (if auth enabled)'),
PROMETHEUS_PASSWORD: str('', 'Prometheus password (if auth enabled)'),
// Metrics Collection
PROMETHEUS_SCRAPE_INTERVAL: str('15s', 'Default scrape interval'),
PROMETHEUS_EVALUATION_INTERVAL: str('15s', 'Rule evaluation interval'),
PROMETHEUS_RETENTION_TIME: str('15d', 'Data retention time'),
// TLS Settings
PROMETHEUS_TLS_ENABLED: bool(false, 'Enable TLS for Prometheus'),
PROMETHEUS_TLS_INSECURE: bool(false, 'Skip TLS verification'),
});
/**
* Grafana configuration with validation and defaults
*/
export const grafanaConfig = cleanEnv(process.env, {
// Grafana Server
GRAFANA_HOST: str('localhost', 'Grafana host'),
GRAFANA_PORT: port(3000, 'Grafana port'),
GRAFANA_URL: str('', 'Complete Grafana URL (overrides host/port)'),
// Authentication
GRAFANA_ADMIN_USER: str('admin', 'Grafana admin username'),
GRAFANA_ADMIN_PASSWORD: str('admin', 'Grafana admin password'),
// Security Settings
GRAFANA_ALLOW_SIGN_UP: bool(false, 'Allow user sign up'),
GRAFANA_SECRET_KEY: str('', 'Grafana secret key for encryption'),
// Database Settings
GRAFANA_DATABASE_TYPE: strWithChoices(['mysql', 'postgres', 'sqlite3'], 'sqlite3', 'Grafana database type'),
GRAFANA_DATABASE_URL: str('', 'Grafana database URL'),
// Feature Flags
GRAFANA_DISABLE_GRAVATAR: bool(true, 'Disable Gravatar avatars'),
GRAFANA_ENABLE_GZIP: bool(true, 'Enable gzip compression'),
});
// Export typed configuration objects
export type PrometheusConfig = typeof prometheusConfig;
export type GrafanaConfig = typeof grafanaConfig;
// Export individual config values for convenience
export const {
PROMETHEUS_HOST,
PROMETHEUS_PORT,
PROMETHEUS_URL,
PROMETHEUS_USERNAME,
PROMETHEUS_PASSWORD,
PROMETHEUS_SCRAPE_INTERVAL,
PROMETHEUS_EVALUATION_INTERVAL,
PROMETHEUS_RETENTION_TIME,
PROMETHEUS_TLS_ENABLED,
PROMETHEUS_TLS_INSECURE,
} = prometheusConfig;
export const {
GRAFANA_HOST,
GRAFANA_PORT,
GRAFANA_URL,
GRAFANA_ADMIN_USER,
GRAFANA_ADMIN_PASSWORD,
GRAFANA_ALLOW_SIGN_UP,
GRAFANA_SECRET_KEY,
GRAFANA_DATABASE_TYPE,
GRAFANA_DATABASE_URL,
GRAFANA_DISABLE_GRAVATAR,
GRAFANA_ENABLE_GZIP,
} = grafanaConfig;
/**
* Monitoring configuration using Yup
* Prometheus metrics, Grafana visualization, and Loki logging
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num, strWithChoices } = envValidators;
/**
* Prometheus configuration with validation and defaults
*/
export const prometheusConfig = cleanEnv(process.env, {
// Prometheus Server
PROMETHEUS_HOST: str('localhost', 'Prometheus host'),
PROMETHEUS_PORT: port(9090, 'Prometheus port'),
PROMETHEUS_URL: str('', 'Complete Prometheus URL (overrides host/port)'),
// Authentication
PROMETHEUS_USERNAME: str('', 'Prometheus username (if auth enabled)'),
PROMETHEUS_PASSWORD: str('', 'Prometheus password (if auth enabled)'),
// Metrics Collection
PROMETHEUS_SCRAPE_INTERVAL: str('15s', 'Default scrape interval'),
PROMETHEUS_EVALUATION_INTERVAL: str('15s', 'Rule evaluation interval'),
PROMETHEUS_RETENTION_TIME: str('15d', 'Data retention time'),
// TLS Settings
PROMETHEUS_TLS_ENABLED: bool(false, 'Enable TLS for Prometheus'),
PROMETHEUS_TLS_INSECURE: bool(false, 'Skip TLS verification'),
});
/**
* Grafana configuration with validation and defaults
*/
export const grafanaConfig = cleanEnv(process.env, {
// Grafana Server
GRAFANA_HOST: str('localhost', 'Grafana host'),
GRAFANA_PORT: port(3000, 'Grafana port'),
GRAFANA_URL: str('', 'Complete Grafana URL (overrides host/port)'),
// Authentication
GRAFANA_ADMIN_USER: str('admin', 'Grafana admin username'),
GRAFANA_ADMIN_PASSWORD: str('admin', 'Grafana admin password'),
// Security Settings
GRAFANA_ALLOW_SIGN_UP: bool(false, 'Allow user sign up'),
GRAFANA_SECRET_KEY: str('', 'Grafana secret key for encryption'),
// Database Settings
GRAFANA_DATABASE_TYPE: strWithChoices(['mysql', 'postgres', 'sqlite3'], 'sqlite3', 'Grafana database type'),
GRAFANA_DATABASE_URL: str('', 'Grafana database URL'),
// Feature Flags
GRAFANA_DISABLE_GRAVATAR: bool(true, 'Disable Gravatar avatars'),
GRAFANA_ENABLE_GZIP: bool(true, 'Enable gzip compression'),
});
// Export typed configuration objects
export type PrometheusConfig = typeof prometheusConfig;
export type GrafanaConfig = typeof grafanaConfig;
// Export individual config values for convenience
export const {
PROMETHEUS_HOST,
PROMETHEUS_PORT,
PROMETHEUS_URL,
PROMETHEUS_USERNAME,
PROMETHEUS_PASSWORD,
PROMETHEUS_SCRAPE_INTERVAL,
PROMETHEUS_EVALUATION_INTERVAL,
PROMETHEUS_RETENTION_TIME,
PROMETHEUS_TLS_ENABLED,
PROMETHEUS_TLS_INSECURE,
} = prometheusConfig;
export const {
GRAFANA_HOST,
GRAFANA_PORT,
GRAFANA_URL,
GRAFANA_ADMIN_USER,
GRAFANA_ADMIN_PASSWORD,
GRAFANA_ALLOW_SIGN_UP,
GRAFANA_SECRET_KEY,
GRAFANA_DATABASE_TYPE,
GRAFANA_DATABASE_URL,
GRAFANA_DISABLE_GRAVATAR,
GRAFANA_ENABLE_GZIP,
} = grafanaConfig;

View file

@ -1,56 +1,56 @@
/**
* PostgreSQL configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* PostgreSQL configuration with validation and defaults
*/
export const postgresConfig = cleanEnv(process.env, {
// PostgreSQL Connection Settings
POSTGRES_HOST: str('localhost', 'PostgreSQL host'),
POSTGRES_PORT: port(5432, 'PostgreSQL port'),
POSTGRES_DATABASE: str('stockbot', 'PostgreSQL database name'),
POSTGRES_USERNAME: str('stockbot', 'PostgreSQL username'),
POSTGRES_PASSWORD: str('', 'PostgreSQL password'),
// Connection Pool Settings
POSTGRES_POOL_MIN: num(2, 'Minimum pool connections'),
POSTGRES_POOL_MAX: num(10, 'Maximum pool connections'),
POSTGRES_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'),
// SSL Configuration
POSTGRES_SSL: bool(false, 'Enable SSL for PostgreSQL connection'),
POSTGRES_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'),
// Additional Settings
POSTGRES_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'),
POSTGRES_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
POSTGRES_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'),
POSTGRES_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'),
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'),
});
// Export typed configuration object
export type PostgresConfig = typeof postgresConfig;
// Export individual config values for convenience
export const {
POSTGRES_HOST,
POSTGRES_PORT,
POSTGRES_DATABASE,
POSTGRES_USERNAME,
POSTGRES_PASSWORD,
POSTGRES_POOL_MIN,
POSTGRES_POOL_MAX,
POSTGRES_POOL_IDLE_TIMEOUT,
POSTGRES_SSL,
POSTGRES_SSL_REJECT_UNAUTHORIZED,
POSTGRES_QUERY_TIMEOUT,
POSTGRES_CONNECTION_TIMEOUT,
POSTGRES_STATEMENT_TIMEOUT,
POSTGRES_LOCK_TIMEOUT,
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
} = postgresConfig;
/**
* PostgreSQL configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* PostgreSQL configuration with validation and defaults
*/
export const postgresConfig = cleanEnv(process.env, {
// PostgreSQL Connection Settings
POSTGRES_HOST: str('localhost', 'PostgreSQL host'),
POSTGRES_PORT: port(5432, 'PostgreSQL port'),
POSTGRES_DATABASE: str('stockbot', 'PostgreSQL database name'),
POSTGRES_USERNAME: str('stockbot', 'PostgreSQL username'),
POSTGRES_PASSWORD: str('', 'PostgreSQL password'),
// Connection Pool Settings
POSTGRES_POOL_MIN: num(2, 'Minimum pool connections'),
POSTGRES_POOL_MAX: num(10, 'Maximum pool connections'),
POSTGRES_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'),
// SSL Configuration
POSTGRES_SSL: bool(false, 'Enable SSL for PostgreSQL connection'),
POSTGRES_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'),
// Additional Settings
POSTGRES_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'),
POSTGRES_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
POSTGRES_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'),
POSTGRES_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'),
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'),
});
// Export typed configuration object
export type PostgresConfig = typeof postgresConfig;
// Export individual config values for convenience
export const {
POSTGRES_HOST,
POSTGRES_PORT,
POSTGRES_DATABASE,
POSTGRES_USERNAME,
POSTGRES_PASSWORD,
POSTGRES_POOL_MIN,
POSTGRES_POOL_MAX,
POSTGRES_POOL_IDLE_TIMEOUT,
POSTGRES_SSL,
POSTGRES_SSL_REJECT_UNAUTHORIZED,
POSTGRES_QUERY_TIMEOUT,
POSTGRES_CONNECTION_TIMEOUT,
POSTGRES_STATEMENT_TIMEOUT,
POSTGRES_LOCK_TIMEOUT,
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
} = postgresConfig;

View file

@ -1,55 +1,55 @@
/**
* QuestDB configuration using Yup
* Time-series database for OHLCV data, indicators, and performance metrics
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* QuestDB configuration with validation and defaults
*/
export const questdbConfig = cleanEnv(process.env, {
// QuestDB Connection
QUESTDB_HOST: str('localhost', 'QuestDB host'),
QUESTDB_HTTP_PORT: port(9000, 'QuestDB HTTP port (web console)'),
QUESTDB_PG_PORT: port(8812, 'QuestDB PostgreSQL wire protocol port'),
QUESTDB_INFLUX_PORT: port(9009, 'QuestDB InfluxDB line protocol port'),
// Authentication (if enabled)
QUESTDB_USER: str('', 'QuestDB username (if auth enabled)'),
QUESTDB_PASSWORD: str('', 'QuestDB password (if auth enabled)'),
// Connection Settings
QUESTDB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
QUESTDB_REQUEST_TIMEOUT: num(30000, 'Request timeout in ms'),
QUESTDB_RETRY_ATTEMPTS: num(3, 'Number of retry attempts'),
// TLS Settings
QUESTDB_TLS_ENABLED: bool(false, 'Enable TLS for QuestDB connection'),
QUESTDB_TLS_VERIFY_SERVER_CERT: bool(true, 'Verify server certificate'),
// Database Settings
QUESTDB_DEFAULT_DATABASE: str('qdb', 'Default database name'),
QUESTDB_TELEMETRY_ENABLED: bool(false, 'Enable telemetry'),
});
// Export typed configuration object
export type QuestDbConfig = typeof questdbConfig;
// Export individual config values for convenience
export const {
QUESTDB_HOST,
QUESTDB_HTTP_PORT,
QUESTDB_PG_PORT,
QUESTDB_INFLUX_PORT,
QUESTDB_USER,
QUESTDB_PASSWORD,
QUESTDB_CONNECTION_TIMEOUT,
QUESTDB_REQUEST_TIMEOUT,
QUESTDB_RETRY_ATTEMPTS,
QUESTDB_TLS_ENABLED,
QUESTDB_TLS_VERIFY_SERVER_CERT,
QUESTDB_DEFAULT_DATABASE,
QUESTDB_TELEMETRY_ENABLED,
} = questdbConfig;
/**
* QuestDB configuration using Yup
* Time-series database for OHLCV data, indicators, and performance metrics
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* QuestDB configuration with validation and defaults
*/
export const questdbConfig = cleanEnv(process.env, {
// QuestDB Connection
QUESTDB_HOST: str('localhost', 'QuestDB host'),
QUESTDB_HTTP_PORT: port(9000, 'QuestDB HTTP port (web console)'),
QUESTDB_PG_PORT: port(8812, 'QuestDB PostgreSQL wire protocol port'),
QUESTDB_INFLUX_PORT: port(9009, 'QuestDB InfluxDB line protocol port'),
// Authentication (if enabled)
QUESTDB_USER: str('', 'QuestDB username (if auth enabled)'),
QUESTDB_PASSWORD: str('', 'QuestDB password (if auth enabled)'),
// Connection Settings
QUESTDB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
QUESTDB_REQUEST_TIMEOUT: num(30000, 'Request timeout in ms'),
QUESTDB_RETRY_ATTEMPTS: num(3, 'Number of retry attempts'),
// TLS Settings
QUESTDB_TLS_ENABLED: bool(false, 'Enable TLS for QuestDB connection'),
QUESTDB_TLS_VERIFY_SERVER_CERT: bool(true, 'Verify server certificate'),
// Database Settings
QUESTDB_DEFAULT_DATABASE: str('qdb', 'Default database name'),
QUESTDB_TELEMETRY_ENABLED: bool(false, 'Enable telemetry'),
});
// Export typed configuration object
export type QuestDbConfig = typeof questdbConfig;
// Export individual config values for convenience
export const {
QUESTDB_HOST,
QUESTDB_HTTP_PORT,
QUESTDB_PG_PORT,
QUESTDB_INFLUX_PORT,
QUESTDB_USER,
QUESTDB_PASSWORD,
QUESTDB_CONNECTION_TIMEOUT,
QUESTDB_REQUEST_TIMEOUT,
QUESTDB_RETRY_ATTEMPTS,
QUESTDB_TLS_ENABLED,
QUESTDB_TLS_VERIFY_SERVER_CERT,
QUESTDB_DEFAULT_DATABASE,
QUESTDB_TELEMETRY_ENABLED,
} = questdbConfig;

View file

@ -1,80 +1,80 @@
/**
* Risk management configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, num, bool, strWithChoices } = envValidators;
/**
* Risk configuration with validation and defaults
*/
export const riskConfig = cleanEnv(process.env, {
// Position Sizing
RISK_MAX_POSITION_SIZE: num(0.1, 'Maximum position size as percentage of portfolio'),
RISK_MAX_PORTFOLIO_EXPOSURE: num(0.8, 'Maximum portfolio exposure percentage'),
RISK_MAX_SINGLE_ASSET_EXPOSURE: num(0.2, 'Maximum exposure to single asset'),
RISK_MAX_SECTOR_EXPOSURE: num(0.3, 'Maximum exposure to single sector'),
// Stop Loss and Take Profit
RISK_DEFAULT_STOP_LOSS: num(0.05, 'Default stop loss percentage'),
RISK_DEFAULT_TAKE_PROFIT: num(0.15, 'Default take profit percentage'),
RISK_TRAILING_STOP_ENABLED: bool(true, 'Enable trailing stop losses'),
RISK_TRAILING_STOP_DISTANCE: num(0.03, 'Trailing stop distance percentage'),
// Risk Limits
RISK_MAX_DAILY_LOSS: num(0.05, 'Maximum daily loss percentage'),
RISK_MAX_WEEKLY_LOSS: num(0.1, 'Maximum weekly loss percentage'),
RISK_MAX_MONTHLY_LOSS: num(0.2, 'Maximum monthly loss percentage'),
// Volatility Controls
RISK_MAX_VOLATILITY_THRESHOLD: num(0.4, 'Maximum volatility threshold'),
RISK_VOLATILITY_LOOKBACK_DAYS: num(20, 'Volatility calculation lookback period'),
// Correlation Controls
RISK_MAX_CORRELATION_THRESHOLD: num(0.7, 'Maximum correlation between positions'),
RISK_CORRELATION_LOOKBACK_DAYS: num(60, 'Correlation calculation lookback period'),
// Leverage Controls
RISK_MAX_LEVERAGE: num(2.0, 'Maximum leverage allowed'),
RISK_MARGIN_CALL_THRESHOLD: num(0.3, 'Margin call threshold'),
// Circuit Breakers
RISK_CIRCUIT_BREAKER_ENABLED: bool(true, 'Enable circuit breakers'),
RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD: num(0.1, 'Circuit breaker loss threshold'),
RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES: num(60, 'Circuit breaker cooldown period'),
// Risk Model
RISK_MODEL_TYPE: strWithChoices(['var', 'cvar', 'expected_shortfall'], 'var', 'Risk model type'),
RISK_CONFIDENCE_LEVEL: num(0.95, 'Risk model confidence level'),
RISK_TIME_HORIZON_DAYS: num(1, 'Risk time horizon in days'),
});
// Export typed configuration object
export type RiskConfig = typeof riskConfig;
// Export individual config values for convenience
export const {
RISK_MAX_POSITION_SIZE,
RISK_MAX_PORTFOLIO_EXPOSURE,
RISK_MAX_SINGLE_ASSET_EXPOSURE,
RISK_MAX_SECTOR_EXPOSURE,
RISK_DEFAULT_STOP_LOSS,
RISK_DEFAULT_TAKE_PROFIT,
RISK_TRAILING_STOP_ENABLED,
RISK_TRAILING_STOP_DISTANCE,
RISK_MAX_DAILY_LOSS,
RISK_MAX_WEEKLY_LOSS,
RISK_MAX_MONTHLY_LOSS,
RISK_MAX_VOLATILITY_THRESHOLD,
RISK_VOLATILITY_LOOKBACK_DAYS,
RISK_MAX_CORRELATION_THRESHOLD,
RISK_CORRELATION_LOOKBACK_DAYS,
RISK_MAX_LEVERAGE,
RISK_MARGIN_CALL_THRESHOLD,
RISK_CIRCUIT_BREAKER_ENABLED,
RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD,
RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES,
RISK_MODEL_TYPE,
RISK_CONFIDENCE_LEVEL,
RISK_TIME_HORIZON_DAYS,
} = riskConfig;
/**
* Risk management configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, num, bool, strWithChoices } = envValidators;
/**
* Risk configuration with validation and defaults
*/
export const riskConfig = cleanEnv(process.env, {
// Position Sizing
RISK_MAX_POSITION_SIZE: num(0.1, 'Maximum position size as percentage of portfolio'),
RISK_MAX_PORTFOLIO_EXPOSURE: num(0.8, 'Maximum portfolio exposure percentage'),
RISK_MAX_SINGLE_ASSET_EXPOSURE: num(0.2, 'Maximum exposure to single asset'),
RISK_MAX_SECTOR_EXPOSURE: num(0.3, 'Maximum exposure to single sector'),
// Stop Loss and Take Profit
RISK_DEFAULT_STOP_LOSS: num(0.05, 'Default stop loss percentage'),
RISK_DEFAULT_TAKE_PROFIT: num(0.15, 'Default take profit percentage'),
RISK_TRAILING_STOP_ENABLED: bool(true, 'Enable trailing stop losses'),
RISK_TRAILING_STOP_DISTANCE: num(0.03, 'Trailing stop distance percentage'),
// Risk Limits
RISK_MAX_DAILY_LOSS: num(0.05, 'Maximum daily loss percentage'),
RISK_MAX_WEEKLY_LOSS: num(0.1, 'Maximum weekly loss percentage'),
RISK_MAX_MONTHLY_LOSS: num(0.2, 'Maximum monthly loss percentage'),
// Volatility Controls
RISK_MAX_VOLATILITY_THRESHOLD: num(0.4, 'Maximum volatility threshold'),
RISK_VOLATILITY_LOOKBACK_DAYS: num(20, 'Volatility calculation lookback period'),
// Correlation Controls
RISK_MAX_CORRELATION_THRESHOLD: num(0.7, 'Maximum correlation between positions'),
RISK_CORRELATION_LOOKBACK_DAYS: num(60, 'Correlation calculation lookback period'),
// Leverage Controls
RISK_MAX_LEVERAGE: num(2.0, 'Maximum leverage allowed'),
RISK_MARGIN_CALL_THRESHOLD: num(0.3, 'Margin call threshold'),
// Circuit Breakers
RISK_CIRCUIT_BREAKER_ENABLED: bool(true, 'Enable circuit breakers'),
RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD: num(0.1, 'Circuit breaker loss threshold'),
RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES: num(60, 'Circuit breaker cooldown period'),
// Risk Model
RISK_MODEL_TYPE: strWithChoices(['var', 'cvar', 'expected_shortfall'], 'var', 'Risk model type'),
RISK_CONFIDENCE_LEVEL: num(0.95, 'Risk model confidence level'),
RISK_TIME_HORIZON_DAYS: num(1, 'Risk time horizon in days'),
});
// Export typed configuration object
export type RiskConfig = typeof riskConfig;
// Export individual config values for convenience
export const {
RISK_MAX_POSITION_SIZE,
RISK_MAX_PORTFOLIO_EXPOSURE,
RISK_MAX_SINGLE_ASSET_EXPOSURE,
RISK_MAX_SECTOR_EXPOSURE,
RISK_DEFAULT_STOP_LOSS,
RISK_DEFAULT_TAKE_PROFIT,
RISK_TRAILING_STOP_ENABLED,
RISK_TRAILING_STOP_DISTANCE,
RISK_MAX_DAILY_LOSS,
RISK_MAX_WEEKLY_LOSS,
RISK_MAX_MONTHLY_LOSS,
RISK_MAX_VOLATILITY_THRESHOLD,
RISK_VOLATILITY_LOOKBACK_DAYS,
RISK_MAX_CORRELATION_THRESHOLD,
RISK_CORRELATION_LOOKBACK_DAYS,
RISK_MAX_LEVERAGE,
RISK_MARGIN_CALL_THRESHOLD,
RISK_CIRCUIT_BREAKER_ENABLED,
RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD,
RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES,
RISK_MODEL_TYPE,
RISK_CONFIDENCE_LEVEL,
RISK_TIME_HORIZON_DAYS,
} = riskConfig;

View file

@ -1,85 +1,85 @@
import {
databaseConfig,
questdbConfig,
mongodbConfig,
dragonflyConfig,
prometheusConfig,
grafanaConfig,
lokiConfig,
loggingConfig
} from './dist/index';
// Set test environment variables
process.env.NODE_ENV = 'test';
process.env.PORT = '3001';
// Database configs
process.env.DB_HOST = 'localhost';
process.env.DB_PORT = '5432';
process.env.DB_NAME = 'test_db';
process.env.DB_USER = 'test_user';
process.env.DB_PASSWORD = 'test_pass';
// QuestDB configs
process.env.QUESTDB_HOST = 'localhost';
process.env.QUESTDB_HTTP_PORT = '9000';
process.env.QUESTDB_PG_PORT = '8812';
// MongoDB configs
process.env.MONGODB_HOST = 'localhost';
process.env.MONGODB_PORT = '27017';
process.env.MONGODB_DATABASE = 'test_db';
// Dragonfly configs
process.env.DRAGONFLY_HOST = 'localhost';
process.env.DRAGONFLY_PORT = '6379';
// Monitoring configs
process.env.PROMETHEUS_HOST = 'localhost';
process.env.PROMETHEUS_PORT = '9090';
process.env.GRAFANA_HOST = 'localhost';
process.env.GRAFANA_PORT = '3000';
// Loki configs
process.env.LOKI_HOST = 'localhost';
process.env.LOKI_PORT = '3100';
// Logging configs
process.env.LOG_LEVEL = 'info';
process.env.LOG_FORMAT = 'json';
console.log('🔍 Testing configuration modules...\n');
const configs = [
{ name: 'Database', config: databaseConfig },
{ name: 'QuestDB', config: questdbConfig },
{ name: 'MongoDB', config: mongodbConfig },
{ name: 'Dragonfly', config: dragonflyConfig },
{ name: 'Prometheus', config: prometheusConfig },
{ name: 'Grafana', config: grafanaConfig },
{ name: 'Loki', config: lokiConfig },
{ name: 'Logging', config: loggingConfig },
];
let successful = 0;
for (const { name, config } of configs) {
try {
if (config && typeof config === 'object' && Object.keys(config).length > 0) {
console.log(`${name}: Loaded successfully`);
successful++;
} else {
console.log(`${name}: Invalid config object`);
}
} catch (error) {
console.log(`${name}: ${error.message}`);
}
}
console.log(`\n📊 Test Summary: ${successful}/${configs.length} modules loaded successfully`);
if (successful === configs.length) {
console.log('🎉 All configuration modules working correctly!');
} else {
console.log('⚠️ Some configuration modules have issues.');
}
import {
databaseConfig,
questdbConfig,
mongodbConfig,
dragonflyConfig,
prometheusConfig,
grafanaConfig,
lokiConfig,
loggingConfig
} from './dist/index';
// Set test environment variables
process.env.NODE_ENV = 'test';
process.env.PORT = '3001';
// Database configs
process.env.DB_HOST = 'localhost';
process.env.DB_PORT = '5432';
process.env.DB_NAME = 'test_db';
process.env.DB_USER = 'test_user';
process.env.DB_PASSWORD = 'test_pass';
// QuestDB configs
process.env.QUESTDB_HOST = 'localhost';
process.env.QUESTDB_HTTP_PORT = '9000';
process.env.QUESTDB_PG_PORT = '8812';
// MongoDB configs
process.env.MONGODB_HOST = 'localhost';
process.env.MONGODB_PORT = '27017';
process.env.MONGODB_DATABASE = 'test_db';
// Dragonfly configs
process.env.DRAGONFLY_HOST = 'localhost';
process.env.DRAGONFLY_PORT = '6379';
// Monitoring configs
process.env.PROMETHEUS_HOST = 'localhost';
process.env.PROMETHEUS_PORT = '9090';
process.env.GRAFANA_HOST = 'localhost';
process.env.GRAFANA_PORT = '3000';
// Loki configs
process.env.LOKI_HOST = 'localhost';
process.env.LOKI_PORT = '3100';
// Logging configs
process.env.LOG_LEVEL = 'info';
process.env.LOG_FORMAT = 'json';
console.log('🔍 Testing configuration modules...\n');
const configs = [
{ name: 'Database', config: databaseConfig },
{ name: 'QuestDB', config: questdbConfig },
{ name: 'MongoDB', config: mongodbConfig },
{ name: 'Dragonfly', config: dragonflyConfig },
{ name: 'Prometheus', config: prometheusConfig },
{ name: 'Grafana', config: grafanaConfig },
{ name: 'Loki', config: lokiConfig },
{ name: 'Logging', config: loggingConfig },
];
let successful = 0;
for (const { name, config } of configs) {
try {
if (config && typeof config === 'object' && Object.keys(config).length > 0) {
console.log(`${name}: Loaded successfully`);
successful++;
} else {
console.log(`${name}: Invalid config object`);
}
} catch (error) {
console.log(`${name}: ${error.message}`);
}
}
console.log(`\n📊 Test Summary: ${successful}/${configs.length} modules loaded successfully`);
if (successful === configs.length) {
console.log('🎉 All configuration modules working correctly!');
} else {
console.log('⚠️ Some configuration modules have issues.');
}

View file

@ -1,433 +1,433 @@
/**
* Integration Tests for Config Library
*
* Tests the entire configuration system including module interactions,
* environment loading, validation across modules, and type exports.
*/
import { describe, test, expect, beforeEach } from 'bun:test';
import { setTestEnv, clearEnvVars, getMinimalTestEnv } from '../test/setup';
describe('Config Library Integration', () => {
beforeEach(() => {
// Clear module cache for clean state
// Note: Bun handles module caching differently than Jest
});
describe('Complete Configuration Loading', () => { test('should load all configuration modules successfully', async () => {
setTestEnv(getMinimalTestEnv());
// Import all modules
const [
{ Environment, getEnvironment },
{ postgresConfig },
{ questdbConfig },
{ mongodbConfig },
{ loggingConfig },
{ riskConfig }
] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
// Verify all configs are loaded
expect(Environment).toBeDefined();
expect(getEnvironment).toBeDefined();
expect(postgresConfig).toBeDefined();
expect(questdbConfig).toBeDefined();
expect(mongodbConfig).toBeDefined();
expect(loggingConfig).toBeDefined();
expect(riskConfig).toBeDefined();
// Verify core utilities
expect(getEnvironment()).toBe(Environment.Testing); // Should be Testing due to NODE_ENV=test in setup
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); expect(questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodbConfig.MONGODB_HOST).toBe('localhost'); // fix: use correct property
expect(loggingConfig.LOG_LEVEL).toBeDefined();
expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1);
}); test('should handle missing required environment variables gracefully', async () => {
setTestEnv({
NODE_ENV: 'test'
// Missing required variables
});
// Should be able to load core utilities
const { Environment, getEnvironment } = await import('../src/core');
expect(Environment).toBeDefined();
expect(getEnvironment()).toBe(Environment.Testing);
// Should fail to load modules requiring specific vars (if they have required vars)
// Note: Most modules have defaults, so they might not throw
try {
const { postgresConfig } = await import('../src/postgres');
expect(postgresConfig).toBeDefined();
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value
} catch (error) {
// If it throws, that's also acceptable behavior
expect(error).toBeDefined();
}
}); test('should maintain consistency across environment detection', async () => {
setTestEnv({
NODE_ENV: 'production',
...getMinimalTestEnv()
});
const [
{ Environment, getEnvironment },
{ postgresConfig },
{ questdbConfig },
{ mongodbConfig },
{ loggingConfig }
] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging')
]);
// Note: Due to module caching, environment is set at first import
// All modules should detect the same environment (which will be Testing due to test setup)
expect(getEnvironment()).toBe(Environment.Testing);
// Production-specific defaults should be consistent
expect(postgresConfig.POSTGRES_SSL).toBe(false); // default is false unless overridden expect(questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // checking actual property name
expect(mongodbConfig.MONGODB_TLS).toBe(false); // checking actual property name
expect(loggingConfig.LOG_FORMAT).toBe('json');
});
});
describe('Main Index Exports', () => { test('should export all configuration objects from index', async () => {
setTestEnv(getMinimalTestEnv());
const config = await import('../src/index');
// Core utilities (no coreConfig object)
expect(config.Environment).toBeDefined();
expect(config.getEnvironment).toBeDefined();
expect(config.ConfigurationError).toBeDefined();
// Configuration objects
expect(config.postgresConfig).toBeDefined();
expect(config.questdbConfig).toBeDefined();
expect(config.mongodbConfig).toBeDefined();
expect(config.loggingConfig).toBeDefined();
expect(config.riskConfig).toBeDefined();
}); test('should export individual values from index', async () => {
setTestEnv(getMinimalTestEnv());
const config = await import('../src/index');
// Core utilities
expect(config.Environment).toBeDefined();
expect(config.getEnvironment).toBeDefined();
// Individual configuration values exported from modules
expect(config.POSTGRES_HOST).toBeDefined();
expect(config.POSTGRES_PORT).toBeDefined();
expect(config.QUESTDB_HOST).toBeDefined();
expect(config.MONGODB_HOST).toBeDefined();
// Risk values
expect(config.RISK_MAX_POSITION_SIZE).toBeDefined();
expect(config.RISK_MAX_DAILY_LOSS).toBeDefined();
// Logging values
expect(config.LOG_LEVEL).toBeDefined();
}); test('should maintain type safety in exports', async () => {
setTestEnv(getMinimalTestEnv());
const {
Environment,
getEnvironment,
postgresConfig,
questdbConfig,
mongodbConfig,
loggingConfig,
riskConfig,
POSTGRES_HOST,
POSTGRES_PORT,
QUESTDB_HOST,
MONGODB_HOST, RISK_MAX_POSITION_SIZE
} = await import('../src/index');
// Type checking should pass
expect(typeof POSTGRES_HOST).toBe('string');
expect(typeof POSTGRES_PORT).toBe('number');
expect(typeof QUESTDB_HOST).toBe('string');
expect(typeof MONGODB_HOST).toBe('string');
expect(typeof RISK_MAX_POSITION_SIZE).toBe('number');
// Configuration objects should have expected shapes
expect(postgresConfig).toHaveProperty('POSTGRES_HOST');
expect(postgresConfig).toHaveProperty('POSTGRES_PORT');
expect(questdbConfig).toHaveProperty('QUESTDB_HOST');
expect(mongodbConfig).toHaveProperty('MONGODB_HOST');
expect(loggingConfig).toHaveProperty('LOG_LEVEL');
expect(riskConfig).toHaveProperty('RISK_MAX_POSITION_SIZE');
});
});
describe('Environment Variable Validation', () => {
test('should validate environment variables across all modules', async () => {
setTestEnv({
NODE_ENV: 'test',
LOG_LEVEL: 'info', // valid level
POSTGRES_HOST: 'localhost',
POSTGRES_DATABASE: 'test',
POSTGRES_USERNAME: 'test',
POSTGRES_PASSWORD: 'test',
QUESTDB_HOST: 'localhost',
MONGODB_HOST: 'localhost',
MONGODB_DATABASE: 'test',
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05'
}); // All imports should succeed with valid config
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // default test env
expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost');
expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost');
expect(logging.loggingConfig.LOG_LEVEL).toBe('info'); // set in test
expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // from test env
}); test('should accept valid environment variables across all modules', async () => {
setTestEnv({
NODE_ENV: 'development',
LOG_LEVEL: 'debug',
POSTGRES_HOST: 'localhost',
POSTGRES_PORT: '5432',
POSTGRES_DATABASE: 'stockbot_dev',
POSTGRES_USERNAME: 'dev_user',
POSTGRES_PASSWORD: 'dev_pass',
POSTGRES_SSL: 'false',
QUESTDB_HOST: 'localhost',
QUESTDB_HTTP_PORT: '9000',
QUESTDB_PG_PORT: '8812',
MONGODB_HOST: 'localhost',
MONGODB_DATABASE: 'stockbot_dev',
RISK_MAX_POSITION_SIZE: '0.25',
RISK_MAX_DAILY_LOSS: '0.025',
LOG_FORMAT: 'json',
LOG_FILE_ENABLED: 'false'
});
// All imports should succeed
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
// Since this is the first test to set NODE_ENV to development and modules might not be cached yet,
// this could actually change the environment. Let's test what we actually get.
expect(core.getEnvironment()).toBeDefined(); // Just verify it returns something valid
expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost');
expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost');
expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default value
expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value
});
});
describe('Configuration Consistency', () => { test('should maintain consistent SSL settings across databases', async () => {
setTestEnv({
NODE_ENV: 'production',
POSTGRES_HOST: 'prod-postgres.com',
POSTGRES_DATABASE: 'prod_db',
POSTGRES_USERNAME: 'prod_user',
POSTGRES_PASSWORD: 'prod_pass',
QUESTDB_HOST: 'prod-questdb.com',
MONGODB_HOST: 'prod-mongo.com',
MONGODB_DATABASE: 'prod_db',
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05'
// SSL settings not explicitly set - should use defaults
});
const [postgres, questdb, mongodb] = await Promise.all([
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb')
]);
// Check actual SSL property names and their default values expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default is false
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // default is false
expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false); // default is false
}); test('should maintain consistent environment detection across modules', async () => {
setTestEnv({
NODE_ENV: 'staging',
...getMinimalTestEnv()
});
const [core, logging] = await Promise.all([
import('../src/core'),
import('../src/logging')
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
// The setTestEnv call above doesn't actually change the real NODE_ENV because modules cache it
// So we check that the test setup is working correctly
expect(process.env.NODE_ENV).toBe('test'); // This is what's actually set in test environment
});
});
describe('Performance and Caching', () => { test('should cache configuration values between imports', async () => {
setTestEnv(getMinimalTestEnv());
// Import the same module multiple times
const postgres1 = await import('../src/postgres');
const postgres2 = await import('../src/postgres');
const postgres3 = await import('../src/postgres');
// Should return the same object reference (cached)
expect(postgres1.postgresConfig).toBe(postgres2.postgresConfig);
expect(postgres2.postgresConfig).toBe(postgres3.postgresConfig);
});
test('should handle rapid sequential imports', async () => {
setTestEnv(getMinimalTestEnv());
// Import all modules simultaneously
const startTime = Date.now();
await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
const endTime = Date.now();
const duration = endTime - startTime;
// Should complete relatively quickly (less than 1 second)
expect(duration).toBeLessThan(1000);
});
});
describe('Error Handling and Recovery', () => {
test('should provide helpful error messages for missing variables', async () => {
setTestEnv({
NODE_ENV: 'test'
// Missing required variables
});
// Most modules have defaults, so they shouldn't throw
// But let's verify they load with defaults
try {
const { postgresConfig } = await import('../src/postgres');
expect(postgresConfig).toBeDefined();
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value
} catch (error) {
// If it throws, check that error message is helpful
expect((error as Error).message).toBeTruthy();
}
try {
const { riskConfig } = await import('../src/risk');
expect(riskConfig).toBeDefined();
expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value
} catch (error) {
// If it throws, check that error message is helpful
expect((error as Error).message).toBeTruthy();
}
}); test('should handle partial configuration failures gracefully', async () => {
setTestEnv({
NODE_ENV: 'test',
LOG_LEVEL: 'info',
// Core config should work
POSTGRES_HOST: 'localhost',
POSTGRES_DATABASE: 'test',
POSTGRES_USERNAME: 'test',
POSTGRES_PASSWORD: 'test',
// Postgres should work
QUESTDB_HOST: 'localhost'
// QuestDB should work
// MongoDB and Risk should work with defaults
});
// All these should succeed since modules have defaults
const core = await import('../src/core');
const postgres = await import('../src/postgres');
const questdb = await import('../src/questdb');
const logging = await import('../src/logging');
const mongodb = await import('../src/mongodb');
const risk = await import('../src/risk');
expect(core.Environment).toBeDefined();
expect(postgres.postgresConfig).toBeDefined();
expect(questdb.questdbConfig).toBeDefined();
expect(logging.loggingConfig).toBeDefined();
expect(mongodb.mongodbConfig).toBeDefined();
expect(risk.riskConfig).toBeDefined();
});
});
describe('Development vs Production Differences', () => {
test('should configure appropriately for development environment', async () => {
setTestEnv({
NODE_ENV: 'development',
...getMinimalTestEnv(),
POSTGRES_SSL: undefined, // Should default to false
QUESTDB_TLS_ENABLED: undefined, // Should default to false
MONGODB_TLS: undefined, // Should default to false
LOG_FORMAT: undefined, // Should default to json
RISK_CIRCUIT_BREAKER_ENABLED: undefined // Should default to true
});
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false);
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false);
expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default
expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true); // default
});
test('should configure appropriately for production environment', async () => {
setTestEnv({
NODE_ENV: 'production',
...getMinimalTestEnv(),
POSTGRES_SSL: undefined, // Should default to false (same as dev)
QUESTDB_TLS_ENABLED: undefined, // Should default to false
MONGODB_TLS: undefined, // Should default to false
LOG_FORMAT: undefined, // Should default to json
RISK_CIRCUIT_BREAKER_ENABLED: undefined // Should default to true
});
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk') ]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default doesn't change by env
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false);
expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false);
expect(logging.loggingConfig.LOG_FORMAT).toBe('json');
expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true);
});
});
});
/**
* Integration Tests for Config Library
*
* Tests the entire configuration system including module interactions,
* environment loading, validation across modules, and type exports.
*/
import { describe, test, expect, beforeEach } from 'bun:test';
import { setTestEnv, clearEnvVars, getMinimalTestEnv } from '../test/setup';
describe('Config Library Integration', () => {
beforeEach(() => {
// Clear module cache for clean state
// Note: Bun handles module caching differently than Jest
});
describe('Complete Configuration Loading', () => { test('should load all configuration modules successfully', async () => {
setTestEnv(getMinimalTestEnv());
// Import all modules
const [
{ Environment, getEnvironment },
{ postgresConfig },
{ questdbConfig },
{ mongodbConfig },
{ loggingConfig },
{ riskConfig }
] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
// Verify all configs are loaded
expect(Environment).toBeDefined();
expect(getEnvironment).toBeDefined();
expect(postgresConfig).toBeDefined();
expect(questdbConfig).toBeDefined();
expect(mongodbConfig).toBeDefined();
expect(loggingConfig).toBeDefined();
expect(riskConfig).toBeDefined();
// Verify core utilities
expect(getEnvironment()).toBe(Environment.Testing); // Should be Testing due to NODE_ENV=test in setup
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); expect(questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodbConfig.MONGODB_HOST).toBe('localhost'); // fix: use correct property
expect(loggingConfig.LOG_LEVEL).toBeDefined();
expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1);
}); test('should handle missing required environment variables gracefully', async () => {
setTestEnv({
NODE_ENV: 'test'
// Missing required variables
});
// Should be able to load core utilities
const { Environment, getEnvironment } = await import('../src/core');
expect(Environment).toBeDefined();
expect(getEnvironment()).toBe(Environment.Testing);
// Should fail to load modules requiring specific vars (if they have required vars)
// Note: Most modules have defaults, so they might not throw
try {
const { postgresConfig } = await import('../src/postgres');
expect(postgresConfig).toBeDefined();
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value
} catch (error) {
// If it throws, that's also acceptable behavior
expect(error).toBeDefined();
}
}); test('should maintain consistency across environment detection', async () => {
setTestEnv({
NODE_ENV: 'production',
...getMinimalTestEnv()
});
const [
{ Environment, getEnvironment },
{ postgresConfig },
{ questdbConfig },
{ mongodbConfig },
{ loggingConfig }
] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging')
]);
// Note: Due to module caching, environment is set at first import
// All modules should detect the same environment (which will be Testing due to test setup)
expect(getEnvironment()).toBe(Environment.Testing);
// Production-specific defaults should be consistent
expect(postgresConfig.POSTGRES_SSL).toBe(false); // default is false unless overridden expect(questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // checking actual property name
expect(mongodbConfig.MONGODB_TLS).toBe(false); // checking actual property name
expect(loggingConfig.LOG_FORMAT).toBe('json');
});
});
describe('Main Index Exports', () => { test('should export all configuration objects from index', async () => {
setTestEnv(getMinimalTestEnv());
const config = await import('../src/index');
// Core utilities (no coreConfig object)
expect(config.Environment).toBeDefined();
expect(config.getEnvironment).toBeDefined();
expect(config.ConfigurationError).toBeDefined();
// Configuration objects
expect(config.postgresConfig).toBeDefined();
expect(config.questdbConfig).toBeDefined();
expect(config.mongodbConfig).toBeDefined();
expect(config.loggingConfig).toBeDefined();
expect(config.riskConfig).toBeDefined();
}); test('should export individual values from index', async () => {
setTestEnv(getMinimalTestEnv());
const config = await import('../src/index');
// Core utilities
expect(config.Environment).toBeDefined();
expect(config.getEnvironment).toBeDefined();
// Individual configuration values exported from modules
expect(config.POSTGRES_HOST).toBeDefined();
expect(config.POSTGRES_PORT).toBeDefined();
expect(config.QUESTDB_HOST).toBeDefined();
expect(config.MONGODB_HOST).toBeDefined();
// Risk values
expect(config.RISK_MAX_POSITION_SIZE).toBeDefined();
expect(config.RISK_MAX_DAILY_LOSS).toBeDefined();
// Logging values
expect(config.LOG_LEVEL).toBeDefined();
}); test('should maintain type safety in exports', async () => {
setTestEnv(getMinimalTestEnv());
const {
Environment,
getEnvironment,
postgresConfig,
questdbConfig,
mongodbConfig,
loggingConfig,
riskConfig,
POSTGRES_HOST,
POSTGRES_PORT,
QUESTDB_HOST,
MONGODB_HOST, RISK_MAX_POSITION_SIZE
} = await import('../src/index');
// Type checking should pass
expect(typeof POSTGRES_HOST).toBe('string');
expect(typeof POSTGRES_PORT).toBe('number');
expect(typeof QUESTDB_HOST).toBe('string');
expect(typeof MONGODB_HOST).toBe('string');
expect(typeof RISK_MAX_POSITION_SIZE).toBe('number');
// Configuration objects should have expected shapes
expect(postgresConfig).toHaveProperty('POSTGRES_HOST');
expect(postgresConfig).toHaveProperty('POSTGRES_PORT');
expect(questdbConfig).toHaveProperty('QUESTDB_HOST');
expect(mongodbConfig).toHaveProperty('MONGODB_HOST');
expect(loggingConfig).toHaveProperty('LOG_LEVEL');
expect(riskConfig).toHaveProperty('RISK_MAX_POSITION_SIZE');
});
});
describe('Environment Variable Validation', () => {
test('should validate environment variables across all modules', async () => {
setTestEnv({
NODE_ENV: 'test',
LOG_LEVEL: 'info', // valid level
POSTGRES_HOST: 'localhost',
POSTGRES_DATABASE: 'test',
POSTGRES_USERNAME: 'test',
POSTGRES_PASSWORD: 'test',
QUESTDB_HOST: 'localhost',
MONGODB_HOST: 'localhost',
MONGODB_DATABASE: 'test',
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05'
}); // All imports should succeed with valid config
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // default test env
expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost');
expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost');
expect(logging.loggingConfig.LOG_LEVEL).toBe('info'); // set in test
expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // from test env
}); test('should accept valid environment variables across all modules', async () => {
setTestEnv({
NODE_ENV: 'development',
LOG_LEVEL: 'debug',
POSTGRES_HOST: 'localhost',
POSTGRES_PORT: '5432',
POSTGRES_DATABASE: 'stockbot_dev',
POSTGRES_USERNAME: 'dev_user',
POSTGRES_PASSWORD: 'dev_pass',
POSTGRES_SSL: 'false',
QUESTDB_HOST: 'localhost',
QUESTDB_HTTP_PORT: '9000',
QUESTDB_PG_PORT: '8812',
MONGODB_HOST: 'localhost',
MONGODB_DATABASE: 'stockbot_dev',
RISK_MAX_POSITION_SIZE: '0.25',
RISK_MAX_DAILY_LOSS: '0.025',
LOG_FORMAT: 'json',
LOG_FILE_ENABLED: 'false'
});
// All imports should succeed
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
// Since this is the first test to set NODE_ENV to development and modules might not be cached yet,
// this could actually change the environment. Let's test what we actually get.
expect(core.getEnvironment()).toBeDefined(); // Just verify it returns something valid
expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost');
expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost');
expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default value
expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value
});
});
describe('Configuration Consistency', () => { test('should maintain consistent SSL settings across databases', async () => {
setTestEnv({
NODE_ENV: 'production',
POSTGRES_HOST: 'prod-postgres.com',
POSTGRES_DATABASE: 'prod_db',
POSTGRES_USERNAME: 'prod_user',
POSTGRES_PASSWORD: 'prod_pass',
QUESTDB_HOST: 'prod-questdb.com',
MONGODB_HOST: 'prod-mongo.com',
MONGODB_DATABASE: 'prod_db',
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05'
// SSL settings not explicitly set - should use defaults
});
const [postgres, questdb, mongodb] = await Promise.all([
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb')
]);
// Check actual SSL property names and their default values expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default is false
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // default is false
expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false); // default is false
}); test('should maintain consistent environment detection across modules', async () => {
setTestEnv({
NODE_ENV: 'staging',
...getMinimalTestEnv()
});
const [core, logging] = await Promise.all([
import('../src/core'),
import('../src/logging')
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
// The setTestEnv call above doesn't actually change the real NODE_ENV because modules cache it
// So we check that the test setup is working correctly
expect(process.env.NODE_ENV).toBe('test'); // This is what's actually set in test environment
});
});
describe('Performance and Caching', () => { test('should cache configuration values between imports', async () => {
setTestEnv(getMinimalTestEnv());
// Import the same module multiple times
const postgres1 = await import('../src/postgres');
const postgres2 = await import('../src/postgres');
const postgres3 = await import('../src/postgres');
// Should return the same object reference (cached)
expect(postgres1.postgresConfig).toBe(postgres2.postgresConfig);
expect(postgres2.postgresConfig).toBe(postgres3.postgresConfig);
});
test('should handle rapid sequential imports', async () => {
setTestEnv(getMinimalTestEnv());
// Import all modules simultaneously
const startTime = Date.now();
await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
const endTime = Date.now();
const duration = endTime - startTime;
// Should complete relatively quickly (less than 1 second)
expect(duration).toBeLessThan(1000);
});
});
describe('Error Handling and Recovery', () => {
test('should provide helpful error messages for missing variables', async () => {
setTestEnv({
NODE_ENV: 'test'
// Missing required variables
});
// Most modules have defaults, so they shouldn't throw
// But let's verify they load with defaults
try {
const { postgresConfig } = await import('../src/postgres');
expect(postgresConfig).toBeDefined();
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value
} catch (error) {
// If it throws, check that error message is helpful
expect((error as Error).message).toBeTruthy();
}
try {
const { riskConfig } = await import('../src/risk');
expect(riskConfig).toBeDefined();
expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value
} catch (error) {
// If it throws, check that error message is helpful
expect((error as Error).message).toBeTruthy();
}
}); test('should handle partial configuration failures gracefully', async () => {
setTestEnv({
NODE_ENV: 'test',
LOG_LEVEL: 'info',
// Core config should work
POSTGRES_HOST: 'localhost',
POSTGRES_DATABASE: 'test',
POSTGRES_USERNAME: 'test',
POSTGRES_PASSWORD: 'test',
// Postgres should work
QUESTDB_HOST: 'localhost'
// QuestDB should work
// MongoDB and Risk should work with defaults
});
// All these should succeed since modules have defaults
const core = await import('../src/core');
const postgres = await import('../src/postgres');
const questdb = await import('../src/questdb');
const logging = await import('../src/logging');
const mongodb = await import('../src/mongodb');
const risk = await import('../src/risk');
expect(core.Environment).toBeDefined();
expect(postgres.postgresConfig).toBeDefined();
expect(questdb.questdbConfig).toBeDefined();
expect(logging.loggingConfig).toBeDefined();
expect(mongodb.mongodbConfig).toBeDefined();
expect(risk.riskConfig).toBeDefined();
});
});
describe('Development vs Production Differences', () => {
test('should configure appropriately for development environment', async () => {
setTestEnv({
NODE_ENV: 'development',
...getMinimalTestEnv(),
POSTGRES_SSL: undefined, // Should default to false
QUESTDB_TLS_ENABLED: undefined, // Should default to false
MONGODB_TLS: undefined, // Should default to false
LOG_FORMAT: undefined, // Should default to json
RISK_CIRCUIT_BREAKER_ENABLED: undefined // Should default to true
});
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false);
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false);
expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default
expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true); // default
});
test('should configure appropriately for production environment', async () => {
setTestEnv({
NODE_ENV: 'production',
...getMinimalTestEnv(),
POSTGRES_SSL: undefined, // Should default to false (same as dev)
QUESTDB_TLS_ENABLED: undefined, // Should default to false
MONGODB_TLS: undefined, // Should default to false
LOG_FORMAT: undefined, // Should default to json
RISK_CIRCUIT_BREAKER_ENABLED: undefined // Should default to true
});
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk') ]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default doesn't change by env
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false);
expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false);
expect(logging.loggingConfig.LOG_FORMAT).toBe('json');
expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true);
});
});
});

View file

@ -1,92 +1,92 @@
/**
* Test Setup for @stock-bot/config Library
*
* Provides common setup and utilities for testing configuration modules.
*/
// Set NODE_ENV immediately at module load time
process.env.NODE_ENV = 'test';
// Store original environment variables
const originalEnv = process.env;
// Note: Bun provides its own test globals, no need to import from @jest/globals
beforeEach(() => {
// Reset environment variables to original state
process.env = { ...originalEnv };
// Ensure NODE_ENV is set to test by default
process.env.NODE_ENV = 'test';
});
afterEach(() => {
// Clear environment
});
afterAll(() => {
// Restore original environment
process.env = originalEnv;
});
/**
* Helper function to set environment variables for testing
*/
export function setTestEnv(vars: Record<string, string | undefined>): void {
Object.assign(process.env, vars);
}
/**
* Helper function to clear specific environment variables
*/
export function clearEnvVars(vars: string[]): void {
vars.forEach(varName => {
delete process.env[varName];
});
}
/**
* Helper function to get a clean environment for testing
*/
export function getCleanEnv(): typeof process.env {
return {
NODE_ENV: 'test'
};
}
/**
* Helper function to create minimal required environment variables
*/
export function getMinimalTestEnv(): Record<string, string> { return {
NODE_ENV: 'test',
// Logging
LOG_LEVEL: 'info', // Changed from 'error' to 'info' to match test expectations
// Database
POSTGRES_HOST: 'localhost',
POSTGRES_PORT: '5432',
POSTGRES_DATABASE: 'test_db',
POSTGRES_USERNAME: 'test_user',
POSTGRES_PASSWORD: 'test_pass',
// QuestDB
QUESTDB_HOST: 'localhost',
QUESTDB_HTTP_PORT: '9000',
QUESTDB_PG_PORT: '8812',
// MongoDB
MONGODB_HOST: 'localhost',
MONGODB_PORT: '27017',
MONGODB_DATABASE: 'test_db',
MONGODB_USERNAME: 'test_user',
MONGODB_PASSWORD: 'test_pass',
// Dragonfly
DRAGONFLY_HOST: 'localhost',
DRAGONFLY_PORT: '6379',
// Monitoring
PROMETHEUS_PORT: '9090',
GRAFANA_PORT: '3000',
// Data Providers
DATA_PROVIDER_API_KEY: 'test_key',
// Risk
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05',
// Admin
ADMIN_PORT: '8080'
};
}
/**
* Test Setup for @stock-bot/config Library
*
* Provides common setup and utilities for testing configuration modules.
*/
// Set NODE_ENV immediately at module load time
process.env.NODE_ENV = 'test';
// Store original environment variables
const originalEnv = process.env;
// Note: Bun provides its own test globals, no need to import from @jest/globals
beforeEach(() => {
// Reset environment variables to original state
process.env = { ...originalEnv };
// Ensure NODE_ENV is set to test by default
process.env.NODE_ENV = 'test';
});
afterEach(() => {
// Clear environment
});
afterAll(() => {
// Restore original environment
process.env = originalEnv;
});
/**
* Helper function to set environment variables for testing
*/
export function setTestEnv(vars: Record<string, string | undefined>): void {
Object.assign(process.env, vars);
}
/**
* Helper function to clear specific environment variables
*/
export function clearEnvVars(vars: string[]): void {
vars.forEach(varName => {
delete process.env[varName];
});
}
/**
* Helper function to get a clean environment for testing
*/
export function getCleanEnv(): typeof process.env {
return {
NODE_ENV: 'test'
};
}
/**
* Helper function to create minimal required environment variables
*/
export function getMinimalTestEnv(): Record<string, string> { return {
NODE_ENV: 'test',
// Logging
LOG_LEVEL: 'info', // Changed from 'error' to 'info' to match test expectations
// Database
POSTGRES_HOST: 'localhost',
POSTGRES_PORT: '5432',
POSTGRES_DATABASE: 'test_db',
POSTGRES_USERNAME: 'test_user',
POSTGRES_PASSWORD: 'test_pass',
// QuestDB
QUESTDB_HOST: 'localhost',
QUESTDB_HTTP_PORT: '9000',
QUESTDB_PG_PORT: '8812',
// MongoDB
MONGODB_HOST: 'localhost',
MONGODB_PORT: '27017',
MONGODB_DATABASE: 'test_db',
MONGODB_USERNAME: 'test_user',
MONGODB_PASSWORD: 'test_pass',
// Dragonfly
DRAGONFLY_HOST: 'localhost',
DRAGONFLY_PORT: '6379',
// Monitoring
PROMETHEUS_PORT: '9090',
GRAFANA_PORT: '3000',
// Data Providers
DATA_PROVIDER_API_KEY: 'test_key',
// Risk
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05',
// Admin
ADMIN_PORT: '8080'
};
}

View file

@ -1,12 +1,12 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**/*", "**/tests/**/*"],
"references": [
{ "path": "../types" }
]
}
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**/*", "**/tests/**/*"],
"references": [
{ "path": "../types" }
]
}

View file

@ -1,10 +1,10 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}

View file

@ -1,118 +1,118 @@
#!/usr/bin/env node
/**
* Configuration Validation Script
* Tests that all configuration modules can be loaded and validated
*/
// Set test environment variables
process.env.NODE_ENV = 'test';
process.env.PORT = '3001';
// Database configs
process.env.DB_HOST = 'localhost';
process.env.DB_PORT = '5432';
process.env.DB_NAME = 'test_db';
process.env.DB_USER = 'test_user';
process.env.DB_PASSWORD = 'test_pass';
// QuestDB configs
process.env.QUESTDB_HOST = 'localhost';
process.env.QUESTDB_HTTP_PORT = '9000';
process.env.QUESTDB_PG_PORT = '8812';
// MongoDB configs
process.env.MONGODB_HOST = 'localhost';
process.env.MONGODB_PORT = '27017';
process.env.MONGODB_DATABASE = 'test_db';
// Dragonfly configs
process.env.DRAGONFLY_HOST = 'localhost';
process.env.DRAGONFLY_PORT = '6379';
// Monitoring configs
process.env.PROMETHEUS_HOST = 'localhost';
process.env.PROMETHEUS_PORT = '9090';
process.env.GRAFANA_HOST = 'localhost';
process.env.GRAFANA_PORT = '3000';
// Loki configs
process.env.LOKI_HOST = 'localhost';
process.env.LOKI_PORT = '3100';
// Logging configs
process.env.LOG_LEVEL = 'info';
process.env.LOG_FORMAT = 'json';
try {
console.log('🔍 Validating configuration modules...\n');
// Test each configuration module
const modules = [
{ name: 'Database', path: './dist/database.js' },
{ name: 'QuestDB', path: './dist/questdb.js' },
{ name: 'MongoDB', path: './dist/mongodb.js' },
{ name: 'Dragonfly', path: './dist/dragonfly.js' },
{ name: 'Monitoring', path: './dist/monitoring.js' },
{ name: 'Loki', path: './dist/loki.js' },
{ name: 'Logging', path: './dist/logging.js' },
];
const results = [];
for (const module of modules) {
try {
const config = require(module.path);
const configKeys = Object.keys(config);
if (configKeys.length === 0) {
throw new Error('No exported configuration found');
}
// Try to access the main config object
const mainConfig = config[configKeys[0]];
if (!mainConfig || typeof mainConfig !== 'object') {
throw new Error('Invalid configuration object');
}
console.log(`${module.name}: ${configKeys.length} config(s) loaded`);
results.push({ name: module.name, status: 'success', configs: configKeys });
} catch (error) {
console.log(`${module.name}: ${error.message}`);
results.push({ name: module.name, status: 'error', error: error.message });
}
}
// Test main index exports
try {
const indexExports = require('./dist/index.js');
const exportCount = Object.keys(indexExports).length;
console.log(`\n✅ Index exports: ${exportCount} modules exported`);
results.push({ name: 'Index', status: 'success', exports: exportCount });
} catch (error) {
console.log(`\n❌ Index exports: ${error.message}`);
results.push({ name: 'Index', status: 'error', error: error.message });
}
// Summary
const successful = results.filter(r => r.status === 'success').length;
const total = results.length;
console.log(`\n📊 Validation Summary:`);
console.log(` Total modules: ${total}`);
console.log(` Successful: ${successful}`);
console.log(` Failed: ${total - successful}`);
if (successful === total) {
console.log('\n🎉 All configuration modules validated successfully!');
process.exit(0);
} else {
console.log('\n⚠ Some configuration modules failed validation.');
process.exit(1);
}
} catch (error) {
console.error('❌ Validation script failed:', error.message);
process.exit(1);
}
#!/usr/bin/env node
/**
* Configuration Validation Script
* Tests that all configuration modules can be loaded and validated
*/
// Set test environment variables
process.env.NODE_ENV = 'test';
process.env.PORT = '3001';
// Database configs
process.env.DB_HOST = 'localhost';
process.env.DB_PORT = '5432';
process.env.DB_NAME = 'test_db';
process.env.DB_USER = 'test_user';
process.env.DB_PASSWORD = 'test_pass';
// QuestDB configs
process.env.QUESTDB_HOST = 'localhost';
process.env.QUESTDB_HTTP_PORT = '9000';
process.env.QUESTDB_PG_PORT = '8812';
// MongoDB configs
process.env.MONGODB_HOST = 'localhost';
process.env.MONGODB_PORT = '27017';
process.env.MONGODB_DATABASE = 'test_db';
// Dragonfly configs
process.env.DRAGONFLY_HOST = 'localhost';
process.env.DRAGONFLY_PORT = '6379';
// Monitoring configs
process.env.PROMETHEUS_HOST = 'localhost';
process.env.PROMETHEUS_PORT = '9090';
process.env.GRAFANA_HOST = 'localhost';
process.env.GRAFANA_PORT = '3000';
// Loki configs
process.env.LOKI_HOST = 'localhost';
process.env.LOKI_PORT = '3100';
// Logging configs
process.env.LOG_LEVEL = 'info';
process.env.LOG_FORMAT = 'json';
try {
console.log('🔍 Validating configuration modules...\n');
// Test each configuration module
const modules = [
{ name: 'Database', path: './dist/database.js' },
{ name: 'QuestDB', path: './dist/questdb.js' },
{ name: 'MongoDB', path: './dist/mongodb.js' },
{ name: 'Dragonfly', path: './dist/dragonfly.js' },
{ name: 'Monitoring', path: './dist/monitoring.js' },
{ name: 'Loki', path: './dist/loki.js' },
{ name: 'Logging', path: './dist/logging.js' },
];
const results = [];
for (const module of modules) {
try {
const config = require(module.path);
const configKeys = Object.keys(config);
if (configKeys.length === 0) {
throw new Error('No exported configuration found');
}
// Try to access the main config object
const mainConfig = config[configKeys[0]];
if (!mainConfig || typeof mainConfig !== 'object') {
throw new Error('Invalid configuration object');
}
console.log(`${module.name}: ${configKeys.length} config(s) loaded`);
results.push({ name: module.name, status: 'success', configs: configKeys });
} catch (error) {
console.log(`${module.name}: ${error.message}`);
results.push({ name: module.name, status: 'error', error: error.message });
}
}
// Test main index exports
try {
const indexExports = require('./dist/index.js');
const exportCount = Object.keys(indexExports).length;
console.log(`\n✅ Index exports: ${exportCount} modules exported`);
results.push({ name: 'Index', status: 'success', exports: exportCount });
} catch (error) {
console.log(`\n❌ Index exports: ${error.message}`);
results.push({ name: 'Index', status: 'error', error: error.message });
}
// Summary
const successful = results.filter(r => r.status === 'success').length;
const total = results.length;
console.log(`\n📊 Validation Summary:`);
console.log(` Total modules: ${total}`);
console.log(` Successful: ${successful}`);
console.log(` Failed: ${total - successful}`);
if (successful === total) {
console.log('\n🎉 All configuration modules validated successfully!');
process.exit(0);
} else {
console.log('\n⚠ Some configuration modules failed validation.');
process.exit(1);
}
} catch (error) {
console.error('❌ Validation script failed:', error.message);
process.exit(1);
}

View file

@ -1,24 +1,24 @@
{
"name": "@stock-bot/data-adjustments",
"version": "1.0.0",
"description": "Stock split and dividend adjustment utilities for market data",
"type": "module",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"build": "tsc",
"test": "bun test",
"test:watch": "bun test --watch"
},
"dependencies": {
"@stock-bot/types": "*",
"@stock-bot/logger": "*"
},
"devDependencies": {
"typescript": "^5.4.5",
"bun-types": "^1.1.12"
},
"peerDependencies": {
"typescript": "^5.0.0"
}
}
{
"name": "@stock-bot/data-adjustments",
"version": "1.0.0",
"description": "Stock split and dividend adjustment utilities for market data",
"type": "module",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"scripts": {
"build": "tsc",
"test": "bun test",
"test:watch": "bun test --watch"
},
"dependencies": {
"@stock-bot/types": "*",
"@stock-bot/logger": "*"
},
"devDependencies": {
"typescript": "^5.4.5",
"bun-types": "^1.1.12"
},
"peerDependencies": {
"typescript": "^5.0.0"
}
}

View file

@ -1,33 +1,33 @@
{
"name": "@stock-bot/data-frame",
"version": "1.0.0",
"description": "DataFrame library for time series data manipulation",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/logger": "*",
"@stock-bot/utils": "*"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"bun-types": "^1.2.15"
},
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}
{
"name": "@stock-bot/data-frame",
"version": "1.0.0",
"description": "DataFrame library for time series data manipulation",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/logger": "*",
"@stock-bot/utils": "*"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"bun-types": "^1.2.15"
},
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}

View file

@ -1,485 +1,485 @@
import { getLogger } from '@stock-bot/logger';
export interface DataFrameRow {
[key: string]: any;
}
export interface DataFrameOptions {
index?: string;
columns?: string[];
dtypes?: Record<string, 'number' | 'string' | 'boolean' | 'date'>;
}
export interface GroupByResult {
[key: string]: DataFrame;
}
export interface AggregationFunction {
(values: any[]): any;
}
export class DataFrame {
private data: DataFrameRow[];
private _columns: string[];
private _index: string;
private _dtypes: Record<string, 'number' | 'string' | 'boolean' | 'date'>;
private logger = getLogger('dataframe');
constructor(data: DataFrameRow[] = [], options: DataFrameOptions = {}) {
this.data = [...data];
this._index = options.index || 'index';
this._columns = options.columns || this.inferColumns();
this._dtypes = options.dtypes || {};
this.validateAndCleanData();
}
private inferColumns(): string[] {
if (this.data.length === 0) return [];
const columns = new Set<string>();
for (const row of this.data) {
Object.keys(row).forEach(key => columns.add(key));
}
return Array.from(columns).sort();
}
private validateAndCleanData(): void {
if (this.data.length === 0) return;
// Ensure all rows have the same columns
for (let i = 0; i < this.data.length; i++) {
const row = this.data[i];
// Add missing columns with null values
for (const col of this._columns) {
if (!(col in row)) {
row[col] = null;
}
}
// Apply data type conversions
for (const [col, dtype] of Object.entries(this._dtypes)) {
if (col in row && row[col] !== null) {
row[col] = this.convertValue(row[col], dtype);
}
}
}
}
private convertValue(value: any, dtype: string): any {
switch (dtype) {
case 'number':
return typeof value === 'number' ? value : parseFloat(value);
case 'string':
return String(value);
case 'boolean':
return Boolean(value);
case 'date':
return value instanceof Date ? value : new Date(value);
default:
return value;
}
}
// Basic properties
get columns(): string[] {
return [...this._columns];
}
get index(): string {
return this._index;
}
get length(): number {
return this.data.length;
}
get shape(): [number, number] {
return [this.data.length, this._columns.length];
}
get empty(): boolean {
return this.data.length === 0;
}
// Data access methods
head(n: number = 5): DataFrame {
return new DataFrame(this.data.slice(0, n), {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
tail(n: number = 5): DataFrame {
return new DataFrame(this.data.slice(-n), {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
iloc(start: number, end?: number): DataFrame {
const slice = end !== undefined ? this.data.slice(start, end) : this.data.slice(start);
return new DataFrame(slice, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
at(index: number, column: string): any {
if (index < 0 || index >= this.data.length) {
throw new Error(`Index ${index} out of bounds`);
}
return this.data[index][column];
}
// Column operations
select(columns: string[]): DataFrame {
const validColumns = columns.filter(col => this._columns.includes(col));
const newData = this.data.map(row => {
const newRow: DataFrameRow = {};
for (const col of validColumns) {
newRow[col] = row[col];
}
return newRow;
});
return new DataFrame(newData, {
columns: validColumns,
index: this._index,
dtypes: this.filterDtypes(validColumns)
});
}
drop(columns: string[]): DataFrame {
const remainingColumns = this._columns.filter(col => !columns.includes(col));
return this.select(remainingColumns);
}
getColumn(column: string): any[] {
if (!this._columns.includes(column)) {
throw new Error(`Column '${column}' not found`);
}
return this.data.map(row => row[column]);
}
setColumn(column: string, values: any[]): DataFrame {
if (values.length !== this.data.length) {
throw new Error('Values length must match DataFrame length');
}
const newData = this.data.map((row, index) => ({
...row,
[column]: values[index]
}));
const newColumns = this._columns.includes(column)
? this._columns
: [...this._columns, column];
return new DataFrame(newData, {
columns: newColumns,
index: this._index,
dtypes: this._dtypes
});
}
// Filtering
filter(predicate: (row: DataFrameRow, index: number) => boolean): DataFrame {
const filteredData = this.data.filter(predicate);
return new DataFrame(filteredData, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
where(column: string, operator: '>' | '<' | '>=' | '<=' | '==' | '!=', value: any): DataFrame {
return this.filter(row => {
const cellValue = row[column];
switch (operator) {
case '>': return cellValue > value;
case '<': return cellValue < value;
case '>=': return cellValue >= value;
case '<=': return cellValue <= value;
case '==': return cellValue === value;
case '!=': return cellValue !== value;
default: return false;
}
});
}
// Sorting
sort(column: string, ascending: boolean = true): DataFrame {
const sortedData = [...this.data].sort((a, b) => {
const aVal = a[column];
const bVal = b[column];
if (aVal === bVal) return 0;
const comparison = aVal > bVal ? 1 : -1;
return ascending ? comparison : -comparison;
});
return new DataFrame(sortedData, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
// Aggregation
groupBy(column: string): GroupByResult {
const groups: Record<string, DataFrameRow[]> = {};
for (const row of this.data) {
const key = String(row[column]);
if (!groups[key]) {
groups[key] = [];
}
groups[key].push(row);
}
const result: GroupByResult = {};
for (const [key, rows] of Object.entries(groups)) {
result[key] = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
return result;
}
agg(aggregations: Record<string, AggregationFunction>): DataFrameRow {
const result: DataFrameRow = {};
for (const [column, func] of Object.entries(aggregations)) {
if (!this._columns.includes(column)) {
throw new Error(`Column '${column}' not found`);
}
const values = this.getColumn(column).filter(val => val !== null && val !== undefined);
result[column] = func(values);
}
return result;
}
// Statistical methods
mean(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return values.reduce((sum, val) => sum + val, 0) / values.length;
}
sum(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return values.reduce((sum, val) => sum + val, 0);
}
min(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return Math.min(...values);
}
max(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return Math.max(...values);
}
std(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
const variance = values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / values.length;
return Math.sqrt(variance);
}
// Time series specific methods
resample(timeColumn: string, frequency: string): DataFrame {
// Simple resampling implementation
// For production, you'd want more sophisticated time-based grouping
const sorted = this.sort(timeColumn);
switch (frequency) {
case '1H':
return this.resampleByHour(sorted, timeColumn);
case '1D':
return this.resampleByDay(sorted, timeColumn);
default:
throw new Error(`Unsupported frequency: ${frequency}`);
}
}
private resampleByHour(sorted: DataFrame, timeColumn: string): DataFrame {
const groups: Record<string, DataFrameRow[]> = {};
for (const row of sorted.data) {
const date = new Date(row[timeColumn]);
const hourKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}-${date.getHours()}`;
if (!groups[hourKey]) {
groups[hourKey] = [];
}
groups[hourKey].push(row);
}
const aggregatedData: DataFrameRow[] = [];
for (const [key, rows] of Object.entries(groups)) {
const tempDf = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
// Create OHLCV aggregation
const aggregated: DataFrameRow = {
[timeColumn]: rows[0][timeColumn],
open: rows[0].close || rows[0].price,
high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'),
low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'),
close: rows[rows.length - 1].close || rows[rows.length - 1].price,
volume: tempDf.sum('volume') || 0
};
aggregatedData.push(aggregated);
}
return new DataFrame(aggregatedData);
}
private resampleByDay(sorted: DataFrame, timeColumn: string): DataFrame {
// Similar to resampleByHour but group by day
const groups: Record<string, DataFrameRow[]> = {};
for (const row of sorted.data) {
const date = new Date(row[timeColumn]);
const dayKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}`;
if (!groups[dayKey]) {
groups[dayKey] = [];
}
groups[dayKey].push(row);
}
const aggregatedData: DataFrameRow[] = [];
for (const [key, rows] of Object.entries(groups)) {
const tempDf = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
const aggregated: DataFrameRow = {
[timeColumn]: rows[0][timeColumn],
open: rows[0].close || rows[0].price,
high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'),
low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'),
close: rows[rows.length - 1].close || rows[rows.length - 1].price,
volume: tempDf.sum('volume') || 0
};
aggregatedData.push(aggregated);
}
return new DataFrame(aggregatedData);
}
// Utility methods
copy(): DataFrame {
return new DataFrame(this.data.map(row => ({ ...row })), {
columns: this._columns,
index: this._index,
dtypes: { ...this._dtypes }
});
}
concat(other: DataFrame): DataFrame {
const combinedData = [...this.data, ...other.data];
const combinedColumns = Array.from(new Set([...this._columns, ...other._columns]));
return new DataFrame(combinedData, {
columns: combinedColumns,
index: this._index,
dtypes: { ...this._dtypes, ...other._dtypes }
});
}
toArray(): DataFrameRow[] {
return this.data.map(row => ({ ...row }));
}
toJSON(): string {
return JSON.stringify(this.data);
}
private filterDtypes(columns: string[]): Record<string, 'number' | 'string' | 'boolean' | 'date'> {
const filtered: Record<string, 'number' | 'string' | 'boolean' | 'date'> = {};
for (const col of columns) {
if (this._dtypes[col]) {
filtered[col] = this._dtypes[col];
}
}
return filtered;
}
// Display method
toString(): string {
if (this.empty) {
return 'Empty DataFrame';
}
const maxRows = 10;
const displayData = this.data.slice(0, maxRows);
let result = `DataFrame (${this.length} rows x ${this._columns.length} columns)\n`;
result += this._columns.join('\t') + '\n';
result += '-'.repeat(this._columns.join('\t').length) + '\n';
for (const row of displayData) {
const values = this._columns.map(col => String(row[col] ?? 'null'));
result += values.join('\t') + '\n';
}
if (this.length > maxRows) {
result += `... (${this.length - maxRows} more rows)\n`;
}
return result;
}
}
// Factory functions
export function createDataFrame(data: DataFrameRow[], options?: DataFrameOptions): DataFrame {
return new DataFrame(data, options);
}
export function readCSV(csvData: string, options?: DataFrameOptions): DataFrame {
const lines = csvData.trim().split('\n');
if (lines.length === 0) {
return new DataFrame();
}
const headers = lines[0].split(',').map(h => h.trim());
const data: DataFrameRow[] = [];
for (let i = 1; i < lines.length; i++) {
const values = lines[i].split(',').map(v => v.trim());
const row: DataFrameRow = {};
for (let j = 0; j < headers.length; j++) {
row[headers[j]] = values[j] || null;
}
data.push(row);
}
return new DataFrame(data, {
columns: headers,
...options
});
import { getLogger } from '@stock-bot/logger';
export interface DataFrameRow {
[key: string]: any;
}
export interface DataFrameOptions {
index?: string;
columns?: string[];
dtypes?: Record<string, 'number' | 'string' | 'boolean' | 'date'>;
}
export interface GroupByResult {
[key: string]: DataFrame;
}
export interface AggregationFunction {
(values: any[]): any;
}
export class DataFrame {
private data: DataFrameRow[];
private _columns: string[];
private _index: string;
private _dtypes: Record<string, 'number' | 'string' | 'boolean' | 'date'>;
private logger = getLogger('dataframe');
constructor(data: DataFrameRow[] = [], options: DataFrameOptions = {}) {
this.data = [...data];
this._index = options.index || 'index';
this._columns = options.columns || this.inferColumns();
this._dtypes = options.dtypes || {};
this.validateAndCleanData();
}
private inferColumns(): string[] {
if (this.data.length === 0) return [];
const columns = new Set<string>();
for (const row of this.data) {
Object.keys(row).forEach(key => columns.add(key));
}
return Array.from(columns).sort();
}
private validateAndCleanData(): void {
if (this.data.length === 0) return;
// Ensure all rows have the same columns
for (let i = 0; i < this.data.length; i++) {
const row = this.data[i];
// Add missing columns with null values
for (const col of this._columns) {
if (!(col in row)) {
row[col] = null;
}
}
// Apply data type conversions
for (const [col, dtype] of Object.entries(this._dtypes)) {
if (col in row && row[col] !== null) {
row[col] = this.convertValue(row[col], dtype);
}
}
}
}
private convertValue(value: any, dtype: string): any {
switch (dtype) {
case 'number':
return typeof value === 'number' ? value : parseFloat(value);
case 'string':
return String(value);
case 'boolean':
return Boolean(value);
case 'date':
return value instanceof Date ? value : new Date(value);
default:
return value;
}
}
// Basic properties
get columns(): string[] {
return [...this._columns];
}
get index(): string {
return this._index;
}
get length(): number {
return this.data.length;
}
get shape(): [number, number] {
return [this.data.length, this._columns.length];
}
get empty(): boolean {
return this.data.length === 0;
}
// Data access methods
head(n: number = 5): DataFrame {
return new DataFrame(this.data.slice(0, n), {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
tail(n: number = 5): DataFrame {
return new DataFrame(this.data.slice(-n), {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
iloc(start: number, end?: number): DataFrame {
const slice = end !== undefined ? this.data.slice(start, end) : this.data.slice(start);
return new DataFrame(slice, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
at(index: number, column: string): any {
if (index < 0 || index >= this.data.length) {
throw new Error(`Index ${index} out of bounds`);
}
return this.data[index][column];
}
// Column operations
select(columns: string[]): DataFrame {
const validColumns = columns.filter(col => this._columns.includes(col));
const newData = this.data.map(row => {
const newRow: DataFrameRow = {};
for (const col of validColumns) {
newRow[col] = row[col];
}
return newRow;
});
return new DataFrame(newData, {
columns: validColumns,
index: this._index,
dtypes: this.filterDtypes(validColumns)
});
}
drop(columns: string[]): DataFrame {
const remainingColumns = this._columns.filter(col => !columns.includes(col));
return this.select(remainingColumns);
}
getColumn(column: string): any[] {
if (!this._columns.includes(column)) {
throw new Error(`Column '${column}' not found`);
}
return this.data.map(row => row[column]);
}
setColumn(column: string, values: any[]): DataFrame {
if (values.length !== this.data.length) {
throw new Error('Values length must match DataFrame length');
}
const newData = this.data.map((row, index) => ({
...row,
[column]: values[index]
}));
const newColumns = this._columns.includes(column)
? this._columns
: [...this._columns, column];
return new DataFrame(newData, {
columns: newColumns,
index: this._index,
dtypes: this._dtypes
});
}
// Filtering
filter(predicate: (row: DataFrameRow, index: number) => boolean): DataFrame {
const filteredData = this.data.filter(predicate);
return new DataFrame(filteredData, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
where(column: string, operator: '>' | '<' | '>=' | '<=' | '==' | '!=', value: any): DataFrame {
return this.filter(row => {
const cellValue = row[column];
switch (operator) {
case '>': return cellValue > value;
case '<': return cellValue < value;
case '>=': return cellValue >= value;
case '<=': return cellValue <= value;
case '==': return cellValue === value;
case '!=': return cellValue !== value;
default: return false;
}
});
}
// Sorting
sort(column: string, ascending: boolean = true): DataFrame {
const sortedData = [...this.data].sort((a, b) => {
const aVal = a[column];
const bVal = b[column];
if (aVal === bVal) return 0;
const comparison = aVal > bVal ? 1 : -1;
return ascending ? comparison : -comparison;
});
return new DataFrame(sortedData, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
// Aggregation
groupBy(column: string): GroupByResult {
const groups: Record<string, DataFrameRow[]> = {};
for (const row of this.data) {
const key = String(row[column]);
if (!groups[key]) {
groups[key] = [];
}
groups[key].push(row);
}
const result: GroupByResult = {};
for (const [key, rows] of Object.entries(groups)) {
result[key] = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
return result;
}
agg(aggregations: Record<string, AggregationFunction>): DataFrameRow {
const result: DataFrameRow = {};
for (const [column, func] of Object.entries(aggregations)) {
if (!this._columns.includes(column)) {
throw new Error(`Column '${column}' not found`);
}
const values = this.getColumn(column).filter(val => val !== null && val !== undefined);
result[column] = func(values);
}
return result;
}
// Statistical methods
mean(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return values.reduce((sum, val) => sum + val, 0) / values.length;
}
sum(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return values.reduce((sum, val) => sum + val, 0);
}
min(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return Math.min(...values);
}
max(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return Math.max(...values);
}
std(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
const variance = values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / values.length;
return Math.sqrt(variance);
}
// Time series specific methods
resample(timeColumn: string, frequency: string): DataFrame {
// Simple resampling implementation
// For production, you'd want more sophisticated time-based grouping
const sorted = this.sort(timeColumn);
switch (frequency) {
case '1H':
return this.resampleByHour(sorted, timeColumn);
case '1D':
return this.resampleByDay(sorted, timeColumn);
default:
throw new Error(`Unsupported frequency: ${frequency}`);
}
}
private resampleByHour(sorted: DataFrame, timeColumn: string): DataFrame {
const groups: Record<string, DataFrameRow[]> = {};
for (const row of sorted.data) {
const date = new Date(row[timeColumn]);
const hourKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}-${date.getHours()}`;
if (!groups[hourKey]) {
groups[hourKey] = [];
}
groups[hourKey].push(row);
}
const aggregatedData: DataFrameRow[] = [];
for (const [key, rows] of Object.entries(groups)) {
const tempDf = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
// Create OHLCV aggregation
const aggregated: DataFrameRow = {
[timeColumn]: rows[0][timeColumn],
open: rows[0].close || rows[0].price,
high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'),
low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'),
close: rows[rows.length - 1].close || rows[rows.length - 1].price,
volume: tempDf.sum('volume') || 0
};
aggregatedData.push(aggregated);
}
return new DataFrame(aggregatedData);
}
private resampleByDay(sorted: DataFrame, timeColumn: string): DataFrame {
// Similar to resampleByHour but group by day
const groups: Record<string, DataFrameRow[]> = {};
for (const row of sorted.data) {
const date = new Date(row[timeColumn]);
const dayKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}`;
if (!groups[dayKey]) {
groups[dayKey] = [];
}
groups[dayKey].push(row);
}
const aggregatedData: DataFrameRow[] = [];
for (const [key, rows] of Object.entries(groups)) {
const tempDf = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
const aggregated: DataFrameRow = {
[timeColumn]: rows[0][timeColumn],
open: rows[0].close || rows[0].price,
high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'),
low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'),
close: rows[rows.length - 1].close || rows[rows.length - 1].price,
volume: tempDf.sum('volume') || 0
};
aggregatedData.push(aggregated);
}
return new DataFrame(aggregatedData);
}
// Utility methods
copy(): DataFrame {
return new DataFrame(this.data.map(row => ({ ...row })), {
columns: this._columns,
index: this._index,
dtypes: { ...this._dtypes }
});
}
concat(other: DataFrame): DataFrame {
const combinedData = [...this.data, ...other.data];
const combinedColumns = Array.from(new Set([...this._columns, ...other._columns]));
return new DataFrame(combinedData, {
columns: combinedColumns,
index: this._index,
dtypes: { ...this._dtypes, ...other._dtypes }
});
}
toArray(): DataFrameRow[] {
return this.data.map(row => ({ ...row }));
}
toJSON(): string {
return JSON.stringify(this.data);
}
private filterDtypes(columns: string[]): Record<string, 'number' | 'string' | 'boolean' | 'date'> {
const filtered: Record<string, 'number' | 'string' | 'boolean' | 'date'> = {};
for (const col of columns) {
if (this._dtypes[col]) {
filtered[col] = this._dtypes[col];
}
}
return filtered;
}
// Display method
toString(): string {
if (this.empty) {
return 'Empty DataFrame';
}
const maxRows = 10;
const displayData = this.data.slice(0, maxRows);
let result = `DataFrame (${this.length} rows x ${this._columns.length} columns)\n`;
result += this._columns.join('\t') + '\n';
result += '-'.repeat(this._columns.join('\t').length) + '\n';
for (const row of displayData) {
const values = this._columns.map(col => String(row[col] ?? 'null'));
result += values.join('\t') + '\n';
}
if (this.length > maxRows) {
result += `... (${this.length - maxRows} more rows)\n`;
}
return result;
}
}
// Factory functions
export function createDataFrame(data: DataFrameRow[], options?: DataFrameOptions): DataFrame {
return new DataFrame(data, options);
}
export function readCSV(csvData: string, options?: DataFrameOptions): DataFrame {
const lines = csvData.trim().split('\n');
if (lines.length === 0) {
return new DataFrame();
}
const headers = lines[0].split(',').map(h => h.trim());
const data: DataFrameRow[] = [];
for (let i = 1; i < lines.length; i++) {
const values = lines[i].split(',').map(v => v.trim());
const row: DataFrameRow = {};
for (let j = 0; j < headers.length; j++) {
row[headers[j]] = values[j] || null;
}
data.push(row);
}
return new DataFrame(data, {
columns: headers,
...options
});
}

View file

@ -1,13 +1,13 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../logger" },
{ "path": "../utils" }
]
}
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../logger" },
{ "path": "../utils" }
]
}

View file

@ -1,10 +1,10 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/utils#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/utils#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}

View file

@ -1,35 +1,35 @@
{
"name": "@stock-bot/event-bus",
"version": "1.0.0",
"description": "Event bus library for inter-service communication",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/logger": "*",
"@stock-bot/config": "*",
"ioredis": "^5.3.2",
"eventemitter3": "^5.0.1"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"bun-types": "^1.2.15"
},
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}
{
"name": "@stock-bot/event-bus",
"version": "1.0.0",
"description": "Event bus library for inter-service communication",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/logger": "*",
"@stock-bot/config": "*",
"ioredis": "^5.3.2",
"eventemitter3": "^5.0.1"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"bun-types": "^1.2.15"
},
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}

File diff suppressed because it is too large Load diff

View file

@ -1,13 +1,13 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../config" },
{ "path": "../logger" }
]
}
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../config" },
{ "path": "../logger" }
]
}

View file

@ -1,10 +1,10 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}

View file

@ -1,283 +1,283 @@
# HTTP Client Library
A comprehensive HTTP client library for the Stock Bot platform with built-in support for:
- ✅ **Fetch API** - Modern, promise-based HTTP requests
- ✅ **Proxy Support** - HTTP, HTTPS, SOCKS4, and SOCKS5 proxies
- ✅ **Rate Limiting** - Configurable request rate limiting
- ✅ **Timeout Handling** - Request timeouts with abort controllers
- ✅ **Retry Logic** - Automatic retries with exponential backoff
- ✅ **TypeScript** - Full TypeScript support with type safety
- ✅ **Logging Integration** - Optional logger integration
## Installation
```bash
bun add @stock-bot/http
```
## Basic Usage
```typescript
import { HttpClient } from '@stock-bot/http';
// Create a client with default configuration
const client = new HttpClient();
// Make a GET request
const response = await client.get('https://api.example.com/data');
console.log(response.data);
// Make a POST request
const postResponse = await client.post('https://api.example.com/users', {
name: 'John Doe',
email: 'john@example.com'
});
```
## Advanced Configuration
```typescript
import { HttpClient } from '@stock-bot/http';
import { logger } from '@stock-bot/logger';
const client = new HttpClient({
baseURL: 'https://api.example.com',
timeout: 10000, // 10 seconds
retries: 3,
retryDelay: 1000, // 1 second base delay
defaultHeaders: {
'Authorization': 'Bearer token',
'User-Agent': 'Stock-Bot/1.0'
},
validateStatus: (status) => status < 400
}, logger);
```
## Proxy Support
### HTTP/HTTPS Proxy
```typescript
const client = new HttpClient({
proxy: {
type: 'http',
host: 'proxy.example.com',
port: 8080,
username: 'user', // optional
password: 'pass' // optional
}
});
```
### SOCKS Proxy
```typescript
const client = new HttpClient({
proxy: {
type: 'socks5',
host: 'socks-proxy.example.com',
port: 1080,
username: 'user', // optional
password: 'pass' // optional
}
});
```
## Rate Limiting
```typescript
const client = new HttpClient({
rateLimit: {
maxRequests: 100, // Max 100 requests
windowMs: 60 * 1000, // Per 1 minute
skipSuccessfulRequests: false,
skipFailedRequests: true // Don't count failed requests
}
});
// Check rate limit status
const status = client.getRateLimitStatus();
console.log(`${status.currentCount}/${status.maxRequests} requests used`);
```
## Request Methods
```typescript
// GET request
const getData = await client.get('/api/data');
// POST request with body
const postData = await client.post('/api/users', {
name: 'John',
email: 'john@example.com'
});
// PUT request
const putData = await client.put('/api/users/1', updatedUser);
// DELETE request
const deleteData = await client.delete('/api/users/1');
// PATCH request
const patchData = await client.patch('/api/users/1', { name: 'Jane' });
// Custom request
const customResponse = await client.request({
method: 'POST',
url: '/api/custom',
headers: { 'X-Custom': 'value' },
body: { data: 'custom' },
timeout: 5000
});
```
## Error Handling
```typescript
import { HttpError, TimeoutError, RateLimitError } from '@stock-bot/http';
try {
const response = await client.get('/api/data');
} catch (error) {
if (error instanceof TimeoutError) {
console.log('Request timed out');
} else if (error instanceof RateLimitError) {
console.log(`Rate limited: retry after ${error.retryAfter}ms`);
} else if (error instanceof HttpError) {
console.log(`HTTP error ${error.status}: ${error.message}`);
}
}
```
## Retry Configuration
```typescript
const client = new HttpClient({
retries: 3, // Retry up to 3 times
retryDelay: 1000, // Base delay of 1 second
// Exponential backoff: 1s, 2s, 4s
});
// Or per-request retry configuration
const response = await client.get('/api/data', {
retries: 5,
retryDelay: 500
});
```
## Timeout Handling
```typescript
// Global timeout
const client = new HttpClient({
timeout: 30000 // 30 seconds
});
// Per-request timeout
const response = await client.get('/api/data', {
timeout: 5000 // 5 seconds for this request
});
```
## Custom Status Validation
```typescript
const client = new HttpClient({
validateStatus: (status) => {
// Accept 2xx and 3xx status codes
return status >= 200 && status < 400;
}
});
// Or per-request validation
const response = await client.get('/api/data', {
validateStatus: (status) => status === 200 || status === 404
});
```
## TypeScript Support
The library is fully typed with TypeScript:
```typescript
interface User {
id: number;
name: string;
email: string;
}
// Response data is properly typed
const response = await client.get<User[]>('/api/users');
const users: User[] = response.data;
// Request configuration is validated
const config: RequestConfig = {
method: 'POST',
url: '/api/users',
body: { name: 'John' },
timeout: 5000
};
```
## Integration with Logger
```typescript
import { logger } from '@stock-bot/logger';
import { HttpClient } from '@stock-bot/http';
const client = new HttpClient({
baseURL: 'https://api.example.com'
}, logger);
// All requests will be logged with debug/warn/error levels
```
## Testing
```bash
# Run tests
bun test
# Run with coverage
bun test --coverage
# Watch mode
bun test --watch
```
## Features
### Proxy Support
- HTTP and HTTPS proxies
- SOCKS4 and SOCKS5 proxies
- Authentication support
- Automatic agent creation
### Rate Limiting
- Token bucket algorithm
- Configurable window and request limits
- Skip successful/failed requests options
- Real-time status monitoring
### Retry Logic
- Exponential backoff
- Configurable retry attempts
- Smart retry conditions (5xx errors only)
- Per-request retry override
### Error Handling
- Typed error classes
- Detailed error information
- Request/response context
- Timeout detection
### Performance
- Built on modern Fetch API
- Minimal dependencies
- Tree-shakeable exports
- TypeScript optimization
## License
MIT License - see LICENSE file for details.
# HTTP Client Library
A comprehensive HTTP client library for the Stock Bot platform with built-in support for:
- ✅ **Fetch API** - Modern, promise-based HTTP requests
- ✅ **Proxy Support** - HTTP, HTTPS, SOCKS4, and SOCKS5 proxies
- ✅ **Rate Limiting** - Configurable request rate limiting
- ✅ **Timeout Handling** - Request timeouts with abort controllers
- ✅ **Retry Logic** - Automatic retries with exponential backoff
- ✅ **TypeScript** - Full TypeScript support with type safety
- ✅ **Logging Integration** - Optional logger integration
## Installation
```bash
bun add @stock-bot/http
```
## Basic Usage
```typescript
import { HttpClient } from '@stock-bot/http';
// Create a client with default configuration
const client = new HttpClient();
// Make a GET request
const response = await client.get('https://api.example.com/data');
console.log(response.data);
// Make a POST request
const postResponse = await client.post('https://api.example.com/users', {
name: 'John Doe',
email: 'john@example.com'
});
```
## Advanced Configuration
```typescript
import { HttpClient } from '@stock-bot/http';
import { logger } from '@stock-bot/logger';
const client = new HttpClient({
baseURL: 'https://api.example.com',
timeout: 10000, // 10 seconds
retries: 3,
retryDelay: 1000, // 1 second base delay
defaultHeaders: {
'Authorization': 'Bearer token',
'User-Agent': 'Stock-Bot/1.0'
},
validateStatus: (status) => status < 400
}, logger);
```
## Proxy Support
### HTTP/HTTPS Proxy
```typescript
const client = new HttpClient({
proxy: {
type: 'http',
host: 'proxy.example.com',
port: 8080,
username: 'user', // optional
password: 'pass' // optional
}
});
```
### SOCKS Proxy
```typescript
const client = new HttpClient({
proxy: {
type: 'socks5',
host: 'socks-proxy.example.com',
port: 1080,
username: 'user', // optional
password: 'pass' // optional
}
});
```
## Rate Limiting
```typescript
const client = new HttpClient({
rateLimit: {
maxRequests: 100, // Max 100 requests
windowMs: 60 * 1000, // Per 1 minute
skipSuccessfulRequests: false,
skipFailedRequests: true // Don't count failed requests
}
});
// Check rate limit status
const status = client.getRateLimitStatus();
console.log(`${status.currentCount}/${status.maxRequests} requests used`);
```
## Request Methods
```typescript
// GET request
const getData = await client.get('/api/data');
// POST request with body
const postData = await client.post('/api/users', {
name: 'John',
email: 'john@example.com'
});
// PUT request
const putData = await client.put('/api/users/1', updatedUser);
// DELETE request
const deleteData = await client.delete('/api/users/1');
// PATCH request
const patchData = await client.patch('/api/users/1', { name: 'Jane' });
// Custom request
const customResponse = await client.request({
method: 'POST',
url: '/api/custom',
headers: { 'X-Custom': 'value' },
body: { data: 'custom' },
timeout: 5000
});
```
## Error Handling
```typescript
import { HttpError, TimeoutError, RateLimitError } from '@stock-bot/http';
try {
const response = await client.get('/api/data');
} catch (error) {
if (error instanceof TimeoutError) {
console.log('Request timed out');
} else if (error instanceof RateLimitError) {
console.log(`Rate limited: retry after ${error.retryAfter}ms`);
} else if (error instanceof HttpError) {
console.log(`HTTP error ${error.status}: ${error.message}`);
}
}
```
## Retry Configuration
```typescript
const client = new HttpClient({
retries: 3, // Retry up to 3 times
retryDelay: 1000, // Base delay of 1 second
// Exponential backoff: 1s, 2s, 4s
});
// Or per-request retry configuration
const response = await client.get('/api/data', {
retries: 5,
retryDelay: 500
});
```
## Timeout Handling
```typescript
// Global timeout
const client = new HttpClient({
timeout: 30000 // 30 seconds
});
// Per-request timeout
const response = await client.get('/api/data', {
timeout: 5000 // 5 seconds for this request
});
```
## Custom Status Validation
```typescript
const client = new HttpClient({
validateStatus: (status) => {
// Accept 2xx and 3xx status codes
return status >= 200 && status < 400;
}
});
// Or per-request validation
const response = await client.get('/api/data', {
validateStatus: (status) => status === 200 || status === 404
});
```
## TypeScript Support
The library is fully typed with TypeScript:
```typescript
interface User {
id: number;
name: string;
email: string;
}
// Response data is properly typed
const response = await client.get<User[]>('/api/users');
const users: User[] = response.data;
// Request configuration is validated
const config: RequestConfig = {
method: 'POST',
url: '/api/users',
body: { name: 'John' },
timeout: 5000
};
```
## Integration with Logger
```typescript
import { logger } from '@stock-bot/logger';
import { HttpClient } from '@stock-bot/http';
const client = new HttpClient({
baseURL: 'https://api.example.com'
}, logger);
// All requests will be logged with debug/warn/error levels
```
## Testing
```bash
# Run tests
bun test
# Run with coverage
bun test --coverage
# Watch mode
bun test --watch
```
## Features
### Proxy Support
- HTTP and HTTPS proxies
- SOCKS4 and SOCKS5 proxies
- Authentication support
- Automatic agent creation
### Rate Limiting
- Token bucket algorithm
- Configurable window and request limits
- Skip successful/failed requests options
- Real-time status monitoring
### Retry Logic
- Exponential backoff
- Configurable retry attempts
- Smart retry conditions (5xx errors only)
- Per-request retry override
### Error Handling
- Typed error classes
- Detailed error information
- Request/response context
- Timeout detection
### Performance
- Built on modern Fetch API
- Minimal dependencies
- Tree-shakeable exports
- TypeScript optimization
## License
MIT License - see LICENSE file for details.

View file

@ -1,44 +1,44 @@
{
"name": "@stock-bot/http",
"version": "1.0.0",
"description": "HTTP client library with proxy support, rate limiting, and timeout for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"test:watch": "bun test --watch",
"test:coverage": "bun test --coverage",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"axios": "^1.9.0",
"http-proxy-agent": "^7.0.2",
"https-proxy-agent": "^7.0.6",
"socks-proxy-agent": "^8.0.5"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
{
"name": "@stock-bot/http",
"version": "1.0.0",
"description": "HTTP client library with proxy support, rate limiting, and timeout for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"test:watch": "bun test --watch",
"test:coverage": "bun test --coverage",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"axios": "^1.9.0",
"http-proxy-agent": "^7.0.2",
"https-proxy-agent": "^7.0.6",
"socks-proxy-agent": "^8.0.5"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}

View file

@ -1,53 +1,53 @@
import axios, { type AxiosRequestConfig, type AxiosResponse } from 'axios';
import type { RequestConfig, HttpResponse } from '../types';
import type { RequestAdapter } from './types';
import { ProxyManager } from '../proxy-manager';
import { HttpError } from '../types';
/**
* Axios adapter for SOCKS proxies
*/
export class AxiosAdapter implements RequestAdapter {
canHandle(config: RequestConfig): boolean {
// Axios handles SOCKS proxies
return Boolean(config.proxy && (config.proxy.protocol === 'socks4' || config.proxy.protocol === 'socks5'));
}
async request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>> {
const { url, method = 'GET', headers, data, proxy } = config;
if (!proxy) {
throw new Error('Axios adapter requires proxy configuration');
}
// Create proxy configuration using ProxyManager
const axiosConfig: AxiosRequestConfig = {
...ProxyManager.createAxiosConfig(proxy),
url,
method,
headers,
data,
signal,
// Don't throw on non-2xx status codes - let caller handle
validateStatus: () => true,
}; const response: AxiosResponse<T> = await axios(axiosConfig);
const httpResponse: HttpResponse<T> = {
data: response.data,
status: response.status,
headers: response.headers as Record<string, string>,
ok: response.status >= 200 && response.status < 300,
};
// Throw HttpError for non-2xx status codes
if (!httpResponse.ok) {
throw new HttpError(
`Request failed with status ${response.status}`,
response.status,
httpResponse
);
}
return httpResponse;
}
}
import axios, { type AxiosRequestConfig, type AxiosResponse } from 'axios';
import type { RequestConfig, HttpResponse } from '../types';
import type { RequestAdapter } from './types';
import { ProxyManager } from '../proxy-manager';
import { HttpError } from '../types';
/**
* Axios adapter for SOCKS proxies
*/
export class AxiosAdapter implements RequestAdapter {
canHandle(config: RequestConfig): boolean {
// Axios handles SOCKS proxies
return Boolean(config.proxy && (config.proxy.protocol === 'socks4' || config.proxy.protocol === 'socks5'));
}
async request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>> {
const { url, method = 'GET', headers, data, proxy } = config;
if (!proxy) {
throw new Error('Axios adapter requires proxy configuration');
}
// Create proxy configuration using ProxyManager
const axiosConfig: AxiosRequestConfig = {
...ProxyManager.createAxiosConfig(proxy),
url,
method,
headers,
data,
signal,
// Don't throw on non-2xx status codes - let caller handle
validateStatus: () => true,
}; const response: AxiosResponse<T> = await axios(axiosConfig);
const httpResponse: HttpResponse<T> = {
data: response.data,
status: response.status,
headers: response.headers as Record<string, string>,
ok: response.status >= 200 && response.status < 300,
};
// Throw HttpError for non-2xx status codes
if (!httpResponse.ok) {
throw new HttpError(
`Request failed with status ${response.status}`,
response.status,
httpResponse
);
}
return httpResponse;
}
}

View file

@ -1,28 +1,28 @@
import type { RequestConfig } from '../types';
import type { RequestAdapter } from './types';
import { FetchAdapter } from './fetch-adapter';
import { AxiosAdapter } from './axios-adapter';
/**
* Factory for creating the appropriate request adapter
*/
export class AdapterFactory {
private static adapters: RequestAdapter[] = [
new AxiosAdapter(), // Check SOCKS first
new FetchAdapter(), // Fallback to fetch for everything else
];
/**
* Get the appropriate adapter for the given configuration
*/
static getAdapter(config: RequestConfig): RequestAdapter {
for (const adapter of this.adapters) {
if (adapter.canHandle(config)) {
return adapter;
}
}
// Fallback to fetch adapter
return new FetchAdapter();
}
}
import type { RequestConfig } from '../types';
import type { RequestAdapter } from './types';
import { FetchAdapter } from './fetch-adapter';
import { AxiosAdapter } from './axios-adapter';
/**
* Factory for creating the appropriate request adapter
*/
export class AdapterFactory {
private static adapters: RequestAdapter[] = [
new AxiosAdapter(), // Check SOCKS first
new FetchAdapter(), // Fallback to fetch for everything else
];
/**
* Get the appropriate adapter for the given configuration
*/
static getAdapter(config: RequestConfig): RequestAdapter {
for (const adapter of this.adapters) {
if (adapter.canHandle(config)) {
return adapter;
}
}
// Fallback to fetch adapter
return new FetchAdapter();
}
}

View file

@ -1,66 +1,66 @@
import type { RequestConfig, HttpResponse } from '../types';
import type { RequestAdapter } from './types';
import { ProxyManager } from '../proxy-manager';
import { HttpError } from '../types';
/**
* Fetch adapter for HTTP/HTTPS proxies and non-proxy requests
*/
export class FetchAdapter implements RequestAdapter {
canHandle(config: RequestConfig): boolean {
// Fetch handles non-proxy requests and HTTP/HTTPS proxies
return !config.proxy || config.proxy.protocol === 'http' || config.proxy.protocol === 'https';
}
async request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>> {
const { url, method = 'GET', headers, data, proxy } = config;
// Prepare fetch options
const fetchOptions: RequestInit = {
method,
headers,
signal,
};
// Add body for non-GET requests
if (data && method !== 'GET') {
fetchOptions.body = typeof data === 'string' ? data : JSON.stringify(data);
if (typeof data === 'object') {
fetchOptions.headers = { 'Content-Type': 'application/json', ...fetchOptions.headers };
}
}
// Add proxy if needed (using Bun's built-in proxy support)
if (proxy) {
(fetchOptions as any).proxy = ProxyManager.createProxyUrl(proxy);
} const response = await fetch(url, fetchOptions);
// Parse response based on content type
let responseData: T;
const contentType = response.headers.get('content-type') || '';
if (contentType.includes('application/json')) {
responseData = await response.json() as T;
} else {
responseData = await response.text() as T;
}
const httpResponse: HttpResponse<T> = {
data: responseData,
status: response.status,
headers: Object.fromEntries(response.headers.entries()),
ok: response.ok,
};
// Throw HttpError for non-2xx status codes
if (!response.ok) {
throw new HttpError(
`Request failed with status ${response.status}`,
response.status,
httpResponse
);
}
return httpResponse;
}
}
import type { RequestConfig, HttpResponse } from '../types';
import type { RequestAdapter } from './types';
import { ProxyManager } from '../proxy-manager';
import { HttpError } from '../types';
/**
* Fetch adapter for HTTP/HTTPS proxies and non-proxy requests
*/
export class FetchAdapter implements RequestAdapter {
canHandle(config: RequestConfig): boolean {
// Fetch handles non-proxy requests and HTTP/HTTPS proxies
return !config.proxy || config.proxy.protocol === 'http' || config.proxy.protocol === 'https';
}
async request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>> {
const { url, method = 'GET', headers, data, proxy } = config;
// Prepare fetch options
const fetchOptions: RequestInit = {
method,
headers,
signal,
};
// Add body for non-GET requests
if (data && method !== 'GET') {
fetchOptions.body = typeof data === 'string' ? data : JSON.stringify(data);
if (typeof data === 'object') {
fetchOptions.headers = { 'Content-Type': 'application/json', ...fetchOptions.headers };
}
}
// Add proxy if needed (using Bun's built-in proxy support)
if (proxy) {
(fetchOptions as any).proxy = ProxyManager.createProxyUrl(proxy);
} const response = await fetch(url, fetchOptions);
// Parse response based on content type
let responseData: T;
const contentType = response.headers.get('content-type') || '';
if (contentType.includes('application/json')) {
responseData = await response.json() as T;
} else {
responseData = await response.text() as T;
}
const httpResponse: HttpResponse<T> = {
data: responseData,
status: response.status,
headers: Object.fromEntries(response.headers.entries()),
ok: response.ok,
};
// Throw HttpError for non-2xx status codes
if (!response.ok) {
throw new HttpError(
`Request failed with status ${response.status}`,
response.status,
httpResponse
);
}
return httpResponse;
}
}

View file

@ -1,4 +1,4 @@
export * from './types';
export * from './fetch-adapter';
export * from './axios-adapter';
export * from './factory';
export * from './types';
export * from './fetch-adapter';
export * from './axios-adapter';
export * from './factory';

View file

@ -1,16 +1,16 @@
import type { RequestConfig, HttpResponse } from '../types';
/**
* Request adapter interface for different HTTP implementations
*/
export interface RequestAdapter {
/**
* Execute an HTTP request
*/
request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>>;
/**
* Check if this adapter can handle the given configuration
*/
canHandle(config: RequestConfig): boolean;
}
import type { RequestConfig, HttpResponse } from '../types';
/**
* Request adapter interface for different HTTP implementations
*/
export interface RequestAdapter {
/**
* Execute an HTTP request
*/
request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>>;
/**
* Check if this adapter can handle the given configuration
*/
canHandle(config: RequestConfig): boolean;
}

View file

@ -1,155 +1,155 @@
import type { Logger } from '@stock-bot/logger';
import type {
HttpClientConfig,
RequestConfig,
HttpResponse,
} from './types';
import { HttpError } from './types';
import { ProxyManager } from './proxy-manager';
import { AdapterFactory } from './adapters/index';
export class HttpClient {
private readonly config: HttpClientConfig;
private readonly logger?: Logger;
constructor(config: HttpClientConfig = {}, logger?: Logger) {
this.config = config;
this.logger = logger?.child('http-client');
}
// Convenience methods
async get<T = any>(url: string, config: Omit<RequestConfig, 'method' | 'url'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'GET', url });
}
async post<T = any>(url: string, data?: any, config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'POST', url, data });
}
async put<T = any>(url: string, data?: any, config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'PUT', url, data });
}
async del<T = any>(url: string, config: Omit<RequestConfig, 'method' | 'url'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'DELETE', url });
}
async patch<T = any>(url: string, data?: any, config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'PATCH', url, data });
}
/**
* Main request method - clean and simple
*/
async request<T = any>(config: RequestConfig): Promise<HttpResponse<T>> {
const finalConfig = this.mergeConfig(config);
const startTime = Date.now();
this.logger?.debug('Making HTTP request', {
method: finalConfig.method,
url: finalConfig.url,
hasProxy: !!finalConfig.proxy
});
try {
const response = await this.executeRequest<T>(finalConfig);
response.responseTime = Date.now() - startTime;
this.logger?.debug('HTTP request successful', {
method: finalConfig.method,
url: finalConfig.url,
status: response.status,
responseTime: response.responseTime,
});
return response;
} catch (error) {
if( this.logger?.getServiceName() === 'proxy-service' ) {
this.logger?.debug('HTTP request failed', {
method: finalConfig.method,
url: finalConfig.url,
error: (error as Error).message,
});
}else{
this.logger?.warn('HTTP request failed', {
method: finalConfig.method,
url: finalConfig.url,
error: (error as Error).message,
});
}
throw error;
}
}
/**
* Execute request with timeout handling - no race conditions
*/ private async executeRequest<T>(config: RequestConfig): Promise<HttpResponse<T>> {
const timeout = config.timeout ?? this.config.timeout ?? 30000;
const controller = new AbortController();
const startTime = Date.now();
let timeoutId: NodeJS.Timeout | undefined;
// Set up timeout
// Create a timeout promise that will reject
const timeoutPromise = new Promise<never>((_, reject) => {
timeoutId = setTimeout(() => {
const elapsed = Date.now() - startTime;
this.logger?.debug('Request timeout triggered', {
url: config.url,
method: config.method,
timeout,
elapsed
});
// Attempt to abort (may or may not work with Bun)
controller.abort();
// Force rejection regardless of signal behavior
reject(new HttpError(`Request timeout after ${timeout}ms (elapsed: ${elapsed}ms)`));
}, timeout);
});
try {
// Get the appropriate adapter
const adapter = AdapterFactory.getAdapter(config);
const response = await Promise.race([
adapter.request<T>(config, controller.signal),
timeoutPromise
]);
this.logger?.debug('Adapter request successful', { url: config.url, elapsedMs: Date.now() - startTime });
// Clear timeout on success
clearTimeout(timeoutId);
return response;
} catch (error) {
const elapsed = Date.now() - startTime;
this.logger?.debug('Adapter failed successful', { url: config.url, elapsedMs: Date.now() - startTime });
clearTimeout(timeoutId);
// Handle timeout
if (controller.signal.aborted) {
throw new HttpError(`Request timeout after ${timeout}ms`);
}
// Re-throw other errors
if (error instanceof HttpError) {
throw error;
}
throw new HttpError(`Request failed: ${(error as Error).message}`);
}
}
/**
* Merge configs with defaults
*/
private mergeConfig(config: RequestConfig): RequestConfig {
return {
...config,
headers: { ...this.config.headers, ...config.headers },
timeout: config.timeout ?? this.config.timeout,
};
}
}
import type { Logger } from '@stock-bot/logger';
import type {
HttpClientConfig,
RequestConfig,
HttpResponse,
} from './types';
import { HttpError } from './types';
import { ProxyManager } from './proxy-manager';
import { AdapterFactory } from './adapters/index';
export class HttpClient {
private readonly config: HttpClientConfig;
private readonly logger?: Logger;
constructor(config: HttpClientConfig = {}, logger?: Logger) {
this.config = config;
this.logger = logger?.child('http-client');
}
// Convenience methods
async get<T = any>(url: string, config: Omit<RequestConfig, 'method' | 'url'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'GET', url });
}
async post<T = any>(url: string, data?: any, config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'POST', url, data });
}
async put<T = any>(url: string, data?: any, config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'PUT', url, data });
}
async del<T = any>(url: string, config: Omit<RequestConfig, 'method' | 'url'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'DELETE', url });
}
async patch<T = any>(url: string, data?: any, config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'PATCH', url, data });
}
/**
* Main request method - clean and simple
*/
async request<T = any>(config: RequestConfig): Promise<HttpResponse<T>> {
const finalConfig = this.mergeConfig(config);
const startTime = Date.now();
this.logger?.debug('Making HTTP request', {
method: finalConfig.method,
url: finalConfig.url,
hasProxy: !!finalConfig.proxy
});
try {
const response = await this.executeRequest<T>(finalConfig);
response.responseTime = Date.now() - startTime;
this.logger?.debug('HTTP request successful', {
method: finalConfig.method,
url: finalConfig.url,
status: response.status,
responseTime: response.responseTime,
});
return response;
} catch (error) {
if( this.logger?.getServiceName() === 'proxy-service' ) {
this.logger?.debug('HTTP request failed', {
method: finalConfig.method,
url: finalConfig.url,
error: (error as Error).message,
});
}else{
this.logger?.warn('HTTP request failed', {
method: finalConfig.method,
url: finalConfig.url,
error: (error as Error).message,
});
}
throw error;
}
}
/**
* Execute request with timeout handling - no race conditions
*/ private async executeRequest<T>(config: RequestConfig): Promise<HttpResponse<T>> {
const timeout = config.timeout ?? this.config.timeout ?? 30000;
const controller = new AbortController();
const startTime = Date.now();
let timeoutId: NodeJS.Timeout | undefined;
// Set up timeout
// Create a timeout promise that will reject
const timeoutPromise = new Promise<never>((_, reject) => {
timeoutId = setTimeout(() => {
const elapsed = Date.now() - startTime;
this.logger?.debug('Request timeout triggered', {
url: config.url,
method: config.method,
timeout,
elapsed
});
// Attempt to abort (may or may not work with Bun)
controller.abort();
// Force rejection regardless of signal behavior
reject(new HttpError(`Request timeout after ${timeout}ms (elapsed: ${elapsed}ms)`));
}, timeout);
});
try {
// Get the appropriate adapter
const adapter = AdapterFactory.getAdapter(config);
const response = await Promise.race([
adapter.request<T>(config, controller.signal),
timeoutPromise
]);
this.logger?.debug('Adapter request successful', { url: config.url, elapsedMs: Date.now() - startTime });
// Clear timeout on success
clearTimeout(timeoutId);
return response;
} catch (error) {
const elapsed = Date.now() - startTime;
this.logger?.debug('Adapter failed successful', { url: config.url, elapsedMs: Date.now() - startTime });
clearTimeout(timeoutId);
// Handle timeout
if (controller.signal.aborted) {
throw new HttpError(`Request timeout after ${timeout}ms`);
}
// Re-throw other errors
if (error instanceof HttpError) {
throw error;
}
throw new HttpError(`Request failed: ${(error as Error).message}`);
}
}
/**
* Merge configs with defaults
*/
private mergeConfig(config: RequestConfig): RequestConfig {
return {
...config,
headers: { ...this.config.headers, ...config.headers },
timeout: config.timeout ?? this.config.timeout,
};
}
}

View file

@ -1,8 +1,8 @@
// Re-export all types and classes
export * from './types';
export * from './client';
export * from './proxy-manager';
export * from './adapters/index';
// Default export
export { HttpClient as default } from './client';
// Re-export all types and classes
export * from './types';
export * from './client';
export * from './proxy-manager';
export * from './adapters/index';
// Default export
export { HttpClient as default } from './client';

View file

@ -1,66 +1,66 @@
import axios, { AxiosRequestConfig, type AxiosInstance } from 'axios';
import { SocksProxyAgent } from 'socks-proxy-agent';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { HttpProxyAgent } from 'http-proxy-agent';
import type { ProxyInfo } from './types';
export class ProxyManager {
/**
* Determine if we should use Bun fetch (HTTP/HTTPS) or Axios (SOCKS)
*/
static shouldUseBunFetch(proxy: ProxyInfo): boolean {
return proxy.protocol === 'http' || proxy.protocol === 'https';
}
/**
* Create proxy URL for both Bun fetch and Axios proxy agents
*/
static createProxyUrl(proxy: ProxyInfo): string {
const { protocol, host, port, username, password } = proxy;
if (username && password) {
return `${protocol}://${encodeURIComponent(username)}:${encodeURIComponent(password)}@${host}:${port}`;
}
return `${protocol}://${host}:${port}`;
}
/**
* Create appropriate agent for Axios based on proxy type
*/
static createProxyAgent(proxy: ProxyInfo) {
this.validateConfig(proxy);
const proxyUrl = this.createProxyUrl(proxy);
switch (proxy.protocol) {
case 'socks4':
case 'socks5':
// console.log(`Using SOCKS proxy: ${proxyUrl}`);
return new SocksProxyAgent(proxyUrl);
case 'http':
return new HttpProxyAgent(proxyUrl);
case 'https':
return new HttpsProxyAgent(proxyUrl);
default:
throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`);
}
}
/**
* Create Axios instance with proxy configuration
*/
static createAxiosConfig(proxy: ProxyInfo): AxiosRequestConfig {
const agent = this.createProxyAgent(proxy);
return {
httpAgent: agent,
httpsAgent: agent,
};
}
/**
* Simple proxy config validation
*/
static validateConfig(proxy: ProxyInfo): void {
if (!proxy.host || !proxy.port) {
throw new Error('Proxy host and port are required');
}
if (!['http', 'https', 'socks4', 'socks5'].includes(proxy.protocol)) {
throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`);
}
}
}
import axios, { AxiosRequestConfig, type AxiosInstance } from 'axios';
import { SocksProxyAgent } from 'socks-proxy-agent';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { HttpProxyAgent } from 'http-proxy-agent';
import type { ProxyInfo } from './types';
export class ProxyManager {
/**
* Determine if we should use Bun fetch (HTTP/HTTPS) or Axios (SOCKS)
*/
static shouldUseBunFetch(proxy: ProxyInfo): boolean {
return proxy.protocol === 'http' || proxy.protocol === 'https';
}
/**
* Create proxy URL for both Bun fetch and Axios proxy agents
*/
static createProxyUrl(proxy: ProxyInfo): string {
const { protocol, host, port, username, password } = proxy;
if (username && password) {
return `${protocol}://${encodeURIComponent(username)}:${encodeURIComponent(password)}@${host}:${port}`;
}
return `${protocol}://${host}:${port}`;
}
/**
* Create appropriate agent for Axios based on proxy type
*/
static createProxyAgent(proxy: ProxyInfo) {
this.validateConfig(proxy);
const proxyUrl = this.createProxyUrl(proxy);
switch (proxy.protocol) {
case 'socks4':
case 'socks5':
// console.log(`Using SOCKS proxy: ${proxyUrl}`);
return new SocksProxyAgent(proxyUrl);
case 'http':
return new HttpProxyAgent(proxyUrl);
case 'https':
return new HttpsProxyAgent(proxyUrl);
default:
throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`);
}
}
/**
* Create Axios instance with proxy configuration
*/
static createAxiosConfig(proxy: ProxyInfo): AxiosRequestConfig {
const agent = this.createProxyAgent(proxy);
return {
httpAgent: agent,
httpsAgent: agent,
};
}
/**
* Simple proxy config validation
*/
static validateConfig(proxy: ProxyInfo): void {
if (!proxy.host || !proxy.port) {
throw new Error('Proxy host and port are required');
}
if (!['http', 'https', 'socks4', 'socks5'].includes(proxy.protocol)) {
throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`);
}
}
}

View file

@ -1,49 +1,49 @@
// Minimal types for fast HTTP client
export type HttpMethod = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH';
export interface ProxyInfo {
source?: string;
protocol: 'http' | 'https' | 'socks4' | 'socks5';
host: string;
port: number;
username?: string;
password?: string;
url?: string; // Full proxy URL for adapters
isWorking?: boolean;
responseTime?: number;
error?: string;
checkedAt?: Date;
}
export interface HttpClientConfig {
timeout?: number;
headers?: Record<string, string>;
}
export interface RequestConfig {
method?: HttpMethod;
url: string;
headers?: Record<string, string>;
data?: any; // Changed from 'body' to 'data' for consistency
timeout?: number;
proxy?: ProxyInfo;
}
export interface HttpResponse<T = any> {
data: T;
status: number;
headers: Record<string, string>;
ok: boolean;
responseTime?: number;
}
export class HttpError extends Error {
constructor(
message: string,
public status?: number,
public response?: HttpResponse
) {
super(message);
this.name = 'HttpError';
}
}
// Minimal types for fast HTTP client
export type HttpMethod = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH';
export interface ProxyInfo {
source?: string;
protocol: 'http' | 'https' | 'socks4' | 'socks5';
host: string;
port: number;
username?: string;
password?: string;
url?: string; // Full proxy URL for adapters
isWorking?: boolean;
responseTime?: number;
error?: string;
checkedAt?: Date;
}
export interface HttpClientConfig {
timeout?: number;
headers?: Record<string, string>;
}
export interface RequestConfig {
method?: HttpMethod;
url: string;
headers?: Record<string, string>;
data?: any; // Changed from 'body' to 'data' for consistency
timeout?: number;
proxy?: ProxyInfo;
}
export interface HttpResponse<T = any> {
data: T;
status: number;
headers: Record<string, string>;
ok: boolean;
responseTime?: number;
}
export class HttpError extends Error {
constructor(
message: string,
public status?: number,
public response?: HttpResponse
) {
super(message);
this.name = 'HttpError';
}
}

View file

@ -1,154 +1,154 @@
import { describe, test, expect, beforeAll, afterAll } from 'bun:test';
import { HttpClient, HttpError } from '../src/index';
import { MockServer } from './mock-server';
/**
* Integration tests for HTTP client with real network scenarios
* These tests use external services and may be affected by network conditions
*/
let mockServer: MockServer;
let mockServerBaseUrl: string;
beforeAll(async () => {
mockServer = new MockServer();
await mockServer.start();
mockServerBaseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
await mockServer.stop();
});
describe('HTTP Integration Tests', () => {
let client: HttpClient;
beforeAll(() => {
client = new HttpClient({
timeout: 10000
});
});
describe('Real-world scenarios', () => {
test('should handle JSON API responses', async () => {
try {
const response = await client.get('https://jsonplaceholder.typicode.com/posts/1');
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('id');
expect(response.data).toHaveProperty('title');
expect(response.data).toHaveProperty('body');
} catch (error) {
console.warn('External API test skipped due to network issues:', (error as Error).message);
}
});
test('should handle large responses', async () => {
try {
const response = await client.get('https://jsonplaceholder.typicode.com/posts');
expect(response.status).toBe(200);
expect(Array.isArray(response.data)).toBe(true);
expect(response.data.length).toBeGreaterThan(0);
} catch (error) {
console.warn('Large response test skipped due to network issues:', (error as Error).message);
}
});
test('should handle POST with JSON data', async () => {
try {
const postData = {
title: 'Integration Test Post',
body: 'This is a test post from integration tests',
userId: 1
};
const response = await client.post('https://jsonplaceholder.typicode.com/posts', postData);
expect(response.status).toBe(201);
expect(response.data).toHaveProperty('id');
expect(response.data.title).toBe(postData.title);
} catch (error) {
console.warn('POST integration test skipped due to network issues:', (error as Error).message);
}
});
});
describe('Error scenarios with mock server', () => { test('should handle various HTTP status codes', async () => {
const successCodes = [200, 201];
const errorCodes = [400, 401, 403, 404, 500, 503];
// Test success codes
for (const statusCode of successCodes) {
const response = await client.get(`${mockServerBaseUrl}/status/${statusCode}`);
expect(response.status).toBe(statusCode);
}
// Test error codes (should throw HttpError)
for (const statusCode of errorCodes) {
await expect(
client.get(`${mockServerBaseUrl}/status/${statusCode}`)
).rejects.toThrow(HttpError);
}
});
test('should handle malformed responses gracefully', async () => {
// Mock server returns valid JSON, so this test verifies our client handles it properly
const response = await client.get(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(typeof response.data).toBe('object');
});
test('should handle concurrent requests', async () => {
const requests = Array.from({ length: 5 }, (_, i) =>
client.get(`${mockServerBaseUrl}/`, {
headers: { 'X-Request-ID': `req-${i}` }
})
);
const responses = await Promise.all(requests);
responses.forEach((response, index) => {
expect(response.status).toBe(200);
expect(response.data.headers).toHaveProperty('x-request-id', `req-${index}`);
});
});
});
describe('Performance and reliability', () => {
test('should handle rapid sequential requests', async () => {
const startTime = Date.now();
const requests = [];
for (let i = 0; i < 10; i++) {
requests.push(client.get(`${mockServerBaseUrl}/`));
}
const responses = await Promise.all(requests);
const endTime = Date.now();
expect(responses).toHaveLength(10);
responses.forEach(response => {
expect(response.status).toBe(200);
});
console.log(`Completed 10 requests in ${endTime - startTime}ms`);
});
test('should maintain connection efficiency', async () => {
const clientWithKeepAlive = new HttpClient({
timeout: 5000
});
const requests = Array.from({ length: 3 }, () =>
clientWithKeepAlive.get(`${mockServerBaseUrl}/`)
);
const responses = await Promise.all(requests);
responses.forEach(response => {
expect(response.status).toBe(200);
});
});
});
});
import { describe, test, expect, beforeAll, afterAll } from 'bun:test';
import { HttpClient, HttpError } from '../src/index';
import { MockServer } from './mock-server';
/**
* Integration tests for HTTP client with real network scenarios
* These tests use external services and may be affected by network conditions
*/
let mockServer: MockServer;
let mockServerBaseUrl: string;
beforeAll(async () => {
mockServer = new MockServer();
await mockServer.start();
mockServerBaseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
await mockServer.stop();
});
describe('HTTP Integration Tests', () => {
let client: HttpClient;
beforeAll(() => {
client = new HttpClient({
timeout: 10000
});
});
describe('Real-world scenarios', () => {
test('should handle JSON API responses', async () => {
try {
const response = await client.get('https://jsonplaceholder.typicode.com/posts/1');
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('id');
expect(response.data).toHaveProperty('title');
expect(response.data).toHaveProperty('body');
} catch (error) {
console.warn('External API test skipped due to network issues:', (error as Error).message);
}
});
test('should handle large responses', async () => {
try {
const response = await client.get('https://jsonplaceholder.typicode.com/posts');
expect(response.status).toBe(200);
expect(Array.isArray(response.data)).toBe(true);
expect(response.data.length).toBeGreaterThan(0);
} catch (error) {
console.warn('Large response test skipped due to network issues:', (error as Error).message);
}
});
test('should handle POST with JSON data', async () => {
try {
const postData = {
title: 'Integration Test Post',
body: 'This is a test post from integration tests',
userId: 1
};
const response = await client.post('https://jsonplaceholder.typicode.com/posts', postData);
expect(response.status).toBe(201);
expect(response.data).toHaveProperty('id');
expect(response.data.title).toBe(postData.title);
} catch (error) {
console.warn('POST integration test skipped due to network issues:', (error as Error).message);
}
});
});
describe('Error scenarios with mock server', () => { test('should handle various HTTP status codes', async () => {
const successCodes = [200, 201];
const errorCodes = [400, 401, 403, 404, 500, 503];
// Test success codes
for (const statusCode of successCodes) {
const response = await client.get(`${mockServerBaseUrl}/status/${statusCode}`);
expect(response.status).toBe(statusCode);
}
// Test error codes (should throw HttpError)
for (const statusCode of errorCodes) {
await expect(
client.get(`${mockServerBaseUrl}/status/${statusCode}`)
).rejects.toThrow(HttpError);
}
});
test('should handle malformed responses gracefully', async () => {
// Mock server returns valid JSON, so this test verifies our client handles it properly
const response = await client.get(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(typeof response.data).toBe('object');
});
test('should handle concurrent requests', async () => {
const requests = Array.from({ length: 5 }, (_, i) =>
client.get(`${mockServerBaseUrl}/`, {
headers: { 'X-Request-ID': `req-${i}` }
})
);
const responses = await Promise.all(requests);
responses.forEach((response, index) => {
expect(response.status).toBe(200);
expect(response.data.headers).toHaveProperty('x-request-id', `req-${index}`);
});
});
});
describe('Performance and reliability', () => {
test('should handle rapid sequential requests', async () => {
const startTime = Date.now();
const requests = [];
for (let i = 0; i < 10; i++) {
requests.push(client.get(`${mockServerBaseUrl}/`));
}
const responses = await Promise.all(requests);
const endTime = Date.now();
expect(responses).toHaveLength(10);
responses.forEach(response => {
expect(response.status).toBe(200);
});
console.log(`Completed 10 requests in ${endTime - startTime}ms`);
});
test('should maintain connection efficiency', async () => {
const clientWithKeepAlive = new HttpClient({
timeout: 5000
});
const requests = Array.from({ length: 3 }, () =>
clientWithKeepAlive.get(`${mockServerBaseUrl}/`)
);
const responses = await Promise.all(requests);
responses.forEach(response => {
expect(response.status).toBe(200);
});
});
});
});

View file

@ -1,159 +1,159 @@
import { describe, test, expect, beforeEach, beforeAll, afterAll } from 'bun:test';
import { HttpClient, HttpError, ProxyManager } from '../src/index';
import type { ProxyInfo } from '../src/types';
import { MockServer } from './mock-server';
// Global mock server instance
let mockServer: MockServer;
let mockServerBaseUrl: string;
beforeAll(async () => {
// Start mock server for all tests
mockServer = new MockServer();
await mockServer.start();
mockServerBaseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
// Stop mock server
await mockServer.stop();
});
describe('HttpClient', () => {
let client: HttpClient;
beforeEach(() => {
client = new HttpClient();
});
describe('Basic functionality', () => {
test('should create client with default config', () => {
expect(client).toBeInstanceOf(HttpClient);
});
test('should make GET request', async () => {
const response = await client.get(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('url');
expect(response.data).toHaveProperty('method', 'GET');
});
test('should make POST request with body', async () => {
const testData = {
title: 'Test Post',
body: 'Test body',
userId: 1,
};
const response = await client.post(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('data');
expect(response.data.data).toEqual(testData);
});
test('should handle custom headers', async () => {
const customHeaders = {
'X-Custom-Header': 'test-value',
'User-Agent': 'StockBot-HTTP-Client/1.0'
};
const response = await client.get(`${mockServerBaseUrl}/headers`, {
headers: customHeaders
});
expect(response.status).toBe(200);
expect(response.data.headers).toHaveProperty('x-custom-header', 'test-value');
expect(response.data.headers).toHaveProperty('user-agent', 'StockBot-HTTP-Client/1.0');
});
test('should handle timeout', async () => {
const clientWithTimeout = new HttpClient({ timeout: 1 }); // 1ms timeout
await expect(
clientWithTimeout.get('https://httpbin.org/delay/1')
).rejects.toThrow();
});
});
describe('Error handling', () => {
test('should handle HTTP errors', async () => {
await expect(
client.get(`${mockServerBaseUrl}/status/404`)
).rejects.toThrow(HttpError);
});
test('should handle network errors gracefully', async () => {
await expect(
client.get('https://nonexistent-domain-that-will-fail-12345.test')
).rejects.toThrow();
});
test('should handle invalid URLs', async () => {
await expect(
client.get('not:/a:valid/url')
).rejects.toThrow();
});
});
describe('HTTP methods', () => {
test('should make PUT request', async () => {
const testData = { id: 1, name: 'Updated' };
const response = await client.put(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
});
test('should make DELETE request', async () => {
const response = await client.del(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(response.data.method).toBe('DELETE');
});
test('should make PATCH request', async () => {
const testData = { name: 'Patched' };
const response = await client.patch(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
});
});
});
describe('ProxyManager', () => {
test('should determine when to use Bun fetch', () => {
const httpProxy: ProxyInfo = {
protocol: 'http',
host: 'proxy.example.com',
port: 8080
};
const socksProxy: ProxyInfo = {
protocol: 'socks5',
host: 'proxy.example.com',
port: 1080
};
expect(ProxyManager.shouldUseBunFetch(httpProxy)).toBe(true);
expect(ProxyManager.shouldUseBunFetch(socksProxy)).toBe(false);
});
test('should create proxy URL for Bun fetch', () => {
const proxy: ProxyInfo = {
protocol: 'http',
host: 'proxy.example.com',
port: 8080,
username: 'user',
password: 'pass' };
const proxyUrl = ProxyManager.createProxyUrl(proxy);
expect(proxyUrl).toBe('http://user:pass@proxy.example.com:8080');
});
test('should create proxy URL without credentials', () => {
const proxy: ProxyInfo = {
protocol: 'https',
host: 'proxy.example.com',
port: 8080 };
const proxyUrl = ProxyManager.createProxyUrl(proxy);
expect(proxyUrl).toBe('https://proxy.example.com:8080');
});
});
import { describe, test, expect, beforeEach, beforeAll, afterAll } from 'bun:test';
import { HttpClient, HttpError, ProxyManager } from '../src/index';
import type { ProxyInfo } from '../src/types';
import { MockServer } from './mock-server';
// Global mock server instance
let mockServer: MockServer;
let mockServerBaseUrl: string;
beforeAll(async () => {
// Start mock server for all tests
mockServer = new MockServer();
await mockServer.start();
mockServerBaseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
// Stop mock server
await mockServer.stop();
});
describe('HttpClient', () => {
let client: HttpClient;
beforeEach(() => {
client = new HttpClient();
});
describe('Basic functionality', () => {
test('should create client with default config', () => {
expect(client).toBeInstanceOf(HttpClient);
});
test('should make GET request', async () => {
const response = await client.get(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('url');
expect(response.data).toHaveProperty('method', 'GET');
});
test('should make POST request with body', async () => {
const testData = {
title: 'Test Post',
body: 'Test body',
userId: 1,
};
const response = await client.post(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('data');
expect(response.data.data).toEqual(testData);
});
test('should handle custom headers', async () => {
const customHeaders = {
'X-Custom-Header': 'test-value',
'User-Agent': 'StockBot-HTTP-Client/1.0'
};
const response = await client.get(`${mockServerBaseUrl}/headers`, {
headers: customHeaders
});
expect(response.status).toBe(200);
expect(response.data.headers).toHaveProperty('x-custom-header', 'test-value');
expect(response.data.headers).toHaveProperty('user-agent', 'StockBot-HTTP-Client/1.0');
});
test('should handle timeout', async () => {
const clientWithTimeout = new HttpClient({ timeout: 1 }); // 1ms timeout
await expect(
clientWithTimeout.get('https://httpbin.org/delay/1')
).rejects.toThrow();
});
});
describe('Error handling', () => {
test('should handle HTTP errors', async () => {
await expect(
client.get(`${mockServerBaseUrl}/status/404`)
).rejects.toThrow(HttpError);
});
test('should handle network errors gracefully', async () => {
await expect(
client.get('https://nonexistent-domain-that-will-fail-12345.test')
).rejects.toThrow();
});
test('should handle invalid URLs', async () => {
await expect(
client.get('not:/a:valid/url')
).rejects.toThrow();
});
});
describe('HTTP methods', () => {
test('should make PUT request', async () => {
const testData = { id: 1, name: 'Updated' };
const response = await client.put(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
});
test('should make DELETE request', async () => {
const response = await client.del(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(response.data.method).toBe('DELETE');
});
test('should make PATCH request', async () => {
const testData = { name: 'Patched' };
const response = await client.patch(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
});
});
});
describe('ProxyManager', () => {
test('should determine when to use Bun fetch', () => {
const httpProxy: ProxyInfo = {
protocol: 'http',
host: 'proxy.example.com',
port: 8080
};
const socksProxy: ProxyInfo = {
protocol: 'socks5',
host: 'proxy.example.com',
port: 1080
};
expect(ProxyManager.shouldUseBunFetch(httpProxy)).toBe(true);
expect(ProxyManager.shouldUseBunFetch(socksProxy)).toBe(false);
});
test('should create proxy URL for Bun fetch', () => {
const proxy: ProxyInfo = {
protocol: 'http',
host: 'proxy.example.com',
port: 8080,
username: 'user',
password: 'pass' };
const proxyUrl = ProxyManager.createProxyUrl(proxy);
expect(proxyUrl).toBe('http://user:pass@proxy.example.com:8080');
});
test('should create proxy URL without credentials', () => {
const proxy: ProxyInfo = {
protocol: 'https',
host: 'proxy.example.com',
port: 8080 };
const proxyUrl = ProxyManager.createProxyUrl(proxy);
expect(proxyUrl).toBe('https://proxy.example.com:8080');
});
});

View file

@ -1,131 +1,131 @@
import { describe, test, expect, beforeAll, afterAll } from 'bun:test';
import { MockServer } from './mock-server';
/**
* Tests for the MockServer utility
* Ensures our test infrastructure works correctly
*/
describe('MockServer', () => {
let mockServer: MockServer;
let baseUrl: string;
beforeAll(async () => {
mockServer = new MockServer();
await mockServer.start();
baseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
await mockServer.stop();
});
describe('Server lifecycle', () => {
test('should start and provide base URL', () => {
expect(baseUrl).toMatch(/^http:\/\/localhost:\d+$/);
expect(mockServer.getBaseUrl()).toBe(baseUrl);
});
test('should be reachable', async () => {
const response = await fetch(`${baseUrl}/`);
expect(response.ok).toBe(true);
});
});
describe('Status endpoints', () => {
test('should return correct status codes', async () => {
const statusCodes = [200, 201, 400, 401, 403, 404, 500, 503];
for (const status of statusCodes) {
const response = await fetch(`${baseUrl}/status/${status}`);
expect(response.status).toBe(status);
}
});
});
describe('Headers endpoint', () => {
test('should echo request headers', async () => {
const response = await fetch(`${baseUrl}/headers`, {
headers: {
'X-Test-Header': 'test-value',
'User-Agent': 'MockServer-Test'
} });
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.headers).toHaveProperty('x-test-header', 'test-value');
expect(data.headers).toHaveProperty('user-agent', 'MockServer-Test');
});
});
describe('Basic auth endpoint', () => {
test('should authenticate valid credentials', async () => {
const username = 'testuser';
const password = 'testpass';
const credentials = btoa(`${username}:${password}`);
const response = await fetch(`${baseUrl}/basic-auth/${username}/${password}`, {
headers: {
'Authorization': `Basic ${credentials}`
}
});
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.authenticated).toBe(true);
expect(data.user).toBe(username);
});
test('should reject invalid credentials', async () => {
const credentials = btoa('wrong:credentials');
const response = await fetch(`${baseUrl}/basic-auth/user/pass`, {
headers: {
'Authorization': `Basic ${credentials}`
}
});
expect(response.status).toBe(401);
});
test('should reject missing auth header', async () => {
const response = await fetch(`${baseUrl}/basic-auth/user/pass`);
expect(response.status).toBe(401);
});
});
describe('POST endpoint', () => {
test('should echo POST data', async () => {
const testData = {
message: 'Hello, MockServer!',
timestamp: Date.now()
};
const response = await fetch(`${baseUrl}/post`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify(testData)
});
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.data).toEqual(testData);
expect(data.method).toBe('POST');
expect(data.headers).toHaveProperty('content-type', 'application/json');
});
});
describe('Default endpoint', () => {
test('should return request information', async () => {
const response = await fetch(`${baseUrl}/unknown-endpoint`);
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.url).toBe(`${baseUrl}/unknown-endpoint`);
expect(data.method).toBe('GET');
expect(data.headers).toBeDefined();
});
});
});
import { describe, test, expect, beforeAll, afterAll } from 'bun:test';
import { MockServer } from './mock-server';
/**
* Tests for the MockServer utility
* Ensures our test infrastructure works correctly
*/
describe('MockServer', () => {
let mockServer: MockServer;
let baseUrl: string;
beforeAll(async () => {
mockServer = new MockServer();
await mockServer.start();
baseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
await mockServer.stop();
});
describe('Server lifecycle', () => {
test('should start and provide base URL', () => {
expect(baseUrl).toMatch(/^http:\/\/localhost:\d+$/);
expect(mockServer.getBaseUrl()).toBe(baseUrl);
});
test('should be reachable', async () => {
const response = await fetch(`${baseUrl}/`);
expect(response.ok).toBe(true);
});
});
describe('Status endpoints', () => {
test('should return correct status codes', async () => {
const statusCodes = [200, 201, 400, 401, 403, 404, 500, 503];
for (const status of statusCodes) {
const response = await fetch(`${baseUrl}/status/${status}`);
expect(response.status).toBe(status);
}
});
});
describe('Headers endpoint', () => {
test('should echo request headers', async () => {
const response = await fetch(`${baseUrl}/headers`, {
headers: {
'X-Test-Header': 'test-value',
'User-Agent': 'MockServer-Test'
} });
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.headers).toHaveProperty('x-test-header', 'test-value');
expect(data.headers).toHaveProperty('user-agent', 'MockServer-Test');
});
});
describe('Basic auth endpoint', () => {
test('should authenticate valid credentials', async () => {
const username = 'testuser';
const password = 'testpass';
const credentials = btoa(`${username}:${password}`);
const response = await fetch(`${baseUrl}/basic-auth/${username}/${password}`, {
headers: {
'Authorization': `Basic ${credentials}`
}
});
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.authenticated).toBe(true);
expect(data.user).toBe(username);
});
test('should reject invalid credentials', async () => {
const credentials = btoa('wrong:credentials');
const response = await fetch(`${baseUrl}/basic-auth/user/pass`, {
headers: {
'Authorization': `Basic ${credentials}`
}
});
expect(response.status).toBe(401);
});
test('should reject missing auth header', async () => {
const response = await fetch(`${baseUrl}/basic-auth/user/pass`);
expect(response.status).toBe(401);
});
});
describe('POST endpoint', () => {
test('should echo POST data', async () => {
const testData = {
message: 'Hello, MockServer!',
timestamp: Date.now()
};
const response = await fetch(`${baseUrl}/post`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify(testData)
});
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.data).toEqual(testData);
expect(data.method).toBe('POST');
expect(data.headers).toHaveProperty('content-type', 'application/json');
});
});
describe('Default endpoint', () => {
test('should return request information', async () => {
const response = await fetch(`${baseUrl}/unknown-endpoint`);
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.url).toBe(`${baseUrl}/unknown-endpoint`);
expect(data.method).toBe('GET');
expect(data.headers).toBeDefined();
});
});
});

View file

@ -1,114 +1,114 @@
/**
* Mock HTTP server for testing the HTTP client
* Replaces external dependency on httpbin.org with a local server
*/
export class MockServer {
private server: ReturnType<typeof Bun.serve> | null = null;
private port: number = 0;
/**
* Start the mock server on a random port
*/
async start(): Promise<void> {
this.server = Bun.serve({
port: 1, // Use any available port
fetch: this.handleRequest.bind(this),
error: this.handleError.bind(this),
});
this.port = this.server.port || 1;
console.log(`Mock server started on port ${this.port}`);
}
/**
* Stop the mock server
*/
async stop(): Promise<void> {
if (this.server) {
this.server.stop(true);
this.server = null;
this.port = 0;
console.log('Mock server stopped');
}
}
/**
* Get the base URL of the mock server
*/
getBaseUrl(): string {
if (!this.server) {
throw new Error('Server not started');
}
return `http://localhost:${this.port}`;
}
/**
* Handle incoming requests
*/ private async handleRequest(req: Request): Promise<Response> {
const url = new URL(req.url);
const path = url.pathname;
console.log(`Mock server handling request: ${req.method} ${path}`);
// Status endpoints
if (path.startsWith('/status/')) {
const status = parseInt(path.replace('/status/', ''), 10);
console.log(`Returning status: ${status}`);
return new Response(null, { status });
} // Headers endpoint
if (path === '/headers') {
const headers = Object.fromEntries([...req.headers.entries()]);
console.log('Headers endpoint called, received headers:', headers);
return Response.json({ headers });
} // Basic auth endpoint
if (path.startsWith('/basic-auth/')) {
const parts = path.split('/').filter(Boolean);
const expectedUsername = parts[1];
const expectedPassword = parts[2];
console.log(`Basic auth endpoint called: expected user=${expectedUsername}, pass=${expectedPassword}`);
const authHeader = req.headers.get('authorization');
if (!authHeader || !authHeader.startsWith('Basic ')) {
console.log('Missing or invalid Authorization header');
return new Response('Unauthorized', { status: 401 });
}
const base64Credentials = authHeader.split(' ')[1];
const credentials = atob(base64Credentials);
const [username, password] = credentials.split(':');
if (username === expectedUsername && password === expectedPassword) {
return Response.json({
authenticated: true,
user: username
});
}
return new Response('Unauthorized', { status: 401 });
}
// Echo request body
if (path === '/post' && req.method === 'POST') {
const data = await req.json();
return Response.json({
data,
headers: Object.fromEntries([...req.headers.entries()]),
method: req.method
});
}
// Default response
return Response.json({
url: req.url,
method: req.method,
headers: Object.fromEntries([...req.headers.entries()])
});
}
/**
* Handle errors
*/
private handleError(error: Error): Response {
return new Response('Server error', { status: 500 });
}
}
/**
* Mock HTTP server for testing the HTTP client
* Replaces external dependency on httpbin.org with a local server
*/
export class MockServer {
private server: ReturnType<typeof Bun.serve> | null = null;
private port: number = 0;
/**
* Start the mock server on a random port
*/
async start(): Promise<void> {
this.server = Bun.serve({
port: 1, // Use any available port
fetch: this.handleRequest.bind(this),
error: this.handleError.bind(this),
});
this.port = this.server.port || 1;
console.log(`Mock server started on port ${this.port}`);
}
/**
* Stop the mock server
*/
async stop(): Promise<void> {
if (this.server) {
this.server.stop(true);
this.server = null;
this.port = 0;
console.log('Mock server stopped');
}
}
/**
* Get the base URL of the mock server
*/
getBaseUrl(): string {
if (!this.server) {
throw new Error('Server not started');
}
return `http://localhost:${this.port}`;
}
/**
* Handle incoming requests
*/ private async handleRequest(req: Request): Promise<Response> {
const url = new URL(req.url);
const path = url.pathname;
console.log(`Mock server handling request: ${req.method} ${path}`);
// Status endpoints
if (path.startsWith('/status/')) {
const status = parseInt(path.replace('/status/', ''), 10);
console.log(`Returning status: ${status}`);
return new Response(null, { status });
} // Headers endpoint
if (path === '/headers') {
const headers = Object.fromEntries([...req.headers.entries()]);
console.log('Headers endpoint called, received headers:', headers);
return Response.json({ headers });
} // Basic auth endpoint
if (path.startsWith('/basic-auth/')) {
const parts = path.split('/').filter(Boolean);
const expectedUsername = parts[1];
const expectedPassword = parts[2];
console.log(`Basic auth endpoint called: expected user=${expectedUsername}, pass=${expectedPassword}`);
const authHeader = req.headers.get('authorization');
if (!authHeader || !authHeader.startsWith('Basic ')) {
console.log('Missing or invalid Authorization header');
return new Response('Unauthorized', { status: 401 });
}
const base64Credentials = authHeader.split(' ')[1];
const credentials = atob(base64Credentials);
const [username, password] = credentials.split(':');
if (username === expectedUsername && password === expectedPassword) {
return Response.json({
authenticated: true,
user: username
});
}
return new Response('Unauthorized', { status: 401 });
}
// Echo request body
if (path === '/post' && req.method === 'POST') {
const data = await req.json();
return Response.json({
data,
headers: Object.fromEntries([...req.headers.entries()]),
method: req.method
});
}
// Default response
return Response.json({
url: req.url,
method: req.method,
headers: Object.fromEntries([...req.headers.entries()])
});
}
/**
* Handle errors
*/
private handleError(error: Error): Response {
return new Response('Server error', { status: 500 });
}
}

View file

@ -1,13 +1,13 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**/*", "**/tests/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../logger" }
]
}
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts", "**/test/**/*", "**/tests/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../logger" }
]
}

View file

@ -1,10 +1,10 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/logger#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}

View file

@ -1,337 +1,337 @@
# @stock-bot/logger
Enhanced logging library with Loki integration for the Stock Bot platform (June 2025).
## Features
- 🎯 **Multiple Log Levels**: debug, info, warn, error, http
- 🌐 **Loki Integration**: Centralized logging with Grafana visualization
- 📁 **File Logging**: Daily rotating log files with compression
- 🎨 **Console Logging**: Colored, formatted console output
- 📊 **Structured Logging**: JSON-formatted logs with metadata
- ⚡ **Performance Optimized**: Batching and async logging
- 🔐 **Security**: Automatic sensitive data masking
- 🎭 **Express Middleware**: Request/response logging
- 📈 **Business Events**: Specialized logging for trading operations
## Installation
```bash
# Using Bun (current runtime)
bun install
```
## Basic Usage
### Simple Logging
```typescript
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('my-service');
logger.info('Service started');
logger.warn('This is a warning');
logger.error('An error occurred', new Error('Something went wrong'));
```
### With Context
```typescript
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('trading-service');
logger.info('Trade executed', {
symbol: 'AAPL',
quantity: 100,
price: 150.25,
userId: '12345',
sessionId: 'abc-def-ghi'
});
```
### Performance Logging
```typescript
import { getLogger, createTimer } from '@stock-bot/logger';
const logger = getLogger('data-processor');
const timer = createTimer('data-processing');
// ... do some work ...
const timing = timer.end();
logger.performance('Data processing completed', timing);
```
### Business Events
```typescript
import { getLogger, createBusinessEvent } from '@stock-bot/logger';
const logger = getLogger('order-service');
logger.business('Order placed', createBusinessEvent(
'order',
'place',
{
entity: 'order-123',
result: 'success',
symbol: 'TSLA',
amount: 50000
}
));
```
### Security Events
```typescript
import { getLogger, createSecurityEvent } from '@stock-bot/logger';
const logger = getLogger('auth-service');
logger.security('Failed login attempt', createSecurityEvent(
'authentication',
{
user: 'john@example.com',
result: 'failure',
ip: '192.168.1.100',
severity: 'medium'
}
));
```
## Express Middleware
### Basic Request Logging
```typescript
import express from 'express';
import { loggingMiddleware } from '@stock-bot/logger';
const app = express();
app.use(loggingMiddleware({
serviceName: 'api-gateway',
skipPaths: ['/health', '/metrics']
}));
```
### Error Logging
```typescript
import { errorLoggingMiddleware, getLogger } from '@stock-bot/logger';
const logger = getLogger('api-gateway');
// Add after your routes but before error handlers
app.use(errorLoggingMiddleware(logger));
```
### Request-scoped Logger
```typescript
import { createRequestLogger, getLogger } from '@stock-bot/logger';
const baseLogger = getLogger('api-gateway');
app.use((req, res, next) => {
req.logger = createRequestLogger(req, baseLogger);
next();
});
app.get('/api/data', (req, res) => {
req.logger.info('Processing data request');
// ... handle request ...
});
```
## Configuration
The logger uses configuration from `@stock-bot/config`. Key environment variables:
```bash
# Logging
LOG_LEVEL=info
LOG_CONSOLE=true
LOG_FILE=true
LOG_FILE_PATH=./logs
# Loki
LOKI_HOST=localhost
LOKI_PORT=3100
LOKI_BATCH_SIZE=1024
```
## Advanced Usage
### Child Loggers
```typescript
import { getLogger } from '@stock-bot/logger';
const parentLogger = getLogger('trading-service');
const orderLogger = parentLogger.child({
module: 'order-processing',
orderId: '12345'
});
orderLogger.info('Order validated'); // Will include parent context
```
### Custom Configuration
```typescript
import { getLogger } from '@stock-bot/logger';
// Uses standard getLogger with service-specific configuration
const logger = getLogger('custom-service');
```
### Sensitive Data Masking
```typescript
import { sanitizeMetadata, maskSensitiveData } from '@stock-bot/logger';
const unsafeData = {
username: 'john',
password: 'secret123',
apiKey: 'abc123def456'
};
const safeData = sanitizeMetadata(unsafeData);
// { username: 'john', password: '[REDACTED]', apiKey: '[REDACTED]' }
const message = maskSensitiveData('User API key: abc123def456');
// 'User API key: [API_KEY]'
```
### Log Throttling
```typescript
import { LogThrottle } from '@stock-bot/logger';
const throttle = new LogThrottle(10, 60000); // 10 logs per minute
if (throttle.shouldLog('error-key')) {
logger.error('This error will be throttled');
}
```
## Viewing Logs
### Grafana Dashboard
1. Start the monitoring stack: `docker-compose up grafana loki`
2. Open Grafana at http://localhost:3000
3. Use the "Stock Bot Logs" dashboard
4. Query logs with LogQL: `{service="your-service"}`
### Log Files
When file logging is enabled, logs are written to:
- `./logs/{service-name}-YYYY-MM-DD.log` - All logs
- `./logs/{service-name}-error-YYYY-MM-DD.log` - Error logs only
## Best Practices
1. **Use appropriate log levels**:
- `debug`: Detailed development information
- `info`: General operational messages
- `warn`: Potential issues
- `error`: Actual errors requiring attention
2. **Include context**: Always provide relevant metadata
```typescript
logger.info('Trade executed', { symbol, quantity, price, orderId });
```
3. **Use structured logging**: Avoid string concatenation
```typescript
// Good
logger.info('User logged in', { userId, ip, userAgent });
// Avoid
logger.info(`User ${userId} logged in from ${ip}`);
```
4. **Handle sensitive data**: Use sanitization utilities
```typescript
const safeMetadata = sanitizeMetadata(requestData);
logger.info('API request', safeMetadata);
```
5. **Use correlation IDs**: Track requests across services
```typescript
const logger = getLogger('service').child({
correlationId: req.headers['x-correlation-id']
});
```
## Integration with Services
To use in your service:
1. Add dependency to your service's `package.json`:
```json
{
"dependencies": {
"@stock-bot/logger": "*"
}
}
```
2. Update your service's `tsconfig.json` references:
```json
{
"references": [
{ "path": "../../../libs/logger" }
]
}
```
3. Import and use:
```typescript
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('my-service');
```
## Performance Considerations
- Logs are batched and sent asynchronously to Loki
- File logging uses daily rotation to prevent large files
- Console logging can be disabled in production
- Use log throttling for high-frequency events
- Sensitive data is automatically masked
## Troubleshooting
### Logs not appearing in Loki
1. Check Loki connection:
```bash
curl http://localhost:3100/ready
```
2. Verify environment variables:
```bash
echo $LOKI_HOST $LOKI_PORT
```
3. Check container logs:
```bash
docker logs stock-bot-loki
```
### High memory usage
- Reduce `LOKI_BATCH_SIZE` if batching too many logs
- Disable file logging if not needed
### Missing logs
- Check log level configuration
- Verify service name matches expectations
- Ensure proper error handling around logger calls
# @stock-bot/logger
Enhanced logging library with Loki integration for the Stock Bot platform (June 2025).
## Features
- 🎯 **Multiple Log Levels**: debug, info, warn, error, http
- 🌐 **Loki Integration**: Centralized logging with Grafana visualization
- 📁 **File Logging**: Daily rotating log files with compression
- 🎨 **Console Logging**: Colored, formatted console output
- 📊 **Structured Logging**: JSON-formatted logs with metadata
- ⚡ **Performance Optimized**: Batching and async logging
- 🔐 **Security**: Automatic sensitive data masking
- 🎭 **Express Middleware**: Request/response logging
- 📈 **Business Events**: Specialized logging for trading operations
## Installation
```bash
# Using Bun (current runtime)
bun install
```
## Basic Usage
### Simple Logging
```typescript
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('my-service');
logger.info('Service started');
logger.warn('This is a warning');
logger.error('An error occurred', new Error('Something went wrong'));
```
### With Context
```typescript
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('trading-service');
logger.info('Trade executed', {
symbol: 'AAPL',
quantity: 100,
price: 150.25,
userId: '12345',
sessionId: 'abc-def-ghi'
});
```
### Performance Logging
```typescript
import { getLogger, createTimer } from '@stock-bot/logger';
const logger = getLogger('data-processor');
const timer = createTimer('data-processing');
// ... do some work ...
const timing = timer.end();
logger.performance('Data processing completed', timing);
```
### Business Events
```typescript
import { getLogger, createBusinessEvent } from '@stock-bot/logger';
const logger = getLogger('order-service');
logger.business('Order placed', createBusinessEvent(
'order',
'place',
{
entity: 'order-123',
result: 'success',
symbol: 'TSLA',
amount: 50000
}
));
```
### Security Events
```typescript
import { getLogger, createSecurityEvent } from '@stock-bot/logger';
const logger = getLogger('auth-service');
logger.security('Failed login attempt', createSecurityEvent(
'authentication',
{
user: 'john@example.com',
result: 'failure',
ip: '192.168.1.100',
severity: 'medium'
}
));
```
## Express Middleware
### Basic Request Logging
```typescript
import express from 'express';
import { loggingMiddleware } from '@stock-bot/logger';
const app = express();
app.use(loggingMiddleware({
serviceName: 'api-gateway',
skipPaths: ['/health', '/metrics']
}));
```
### Error Logging
```typescript
import { errorLoggingMiddleware, getLogger } from '@stock-bot/logger';
const logger = getLogger('api-gateway');
// Add after your routes but before error handlers
app.use(errorLoggingMiddleware(logger));
```
### Request-scoped Logger
```typescript
import { createRequestLogger, getLogger } from '@stock-bot/logger';
const baseLogger = getLogger('api-gateway');
app.use((req, res, next) => {
req.logger = createRequestLogger(req, baseLogger);
next();
});
app.get('/api/data', (req, res) => {
req.logger.info('Processing data request');
// ... handle request ...
});
```
## Configuration
The logger uses configuration from `@stock-bot/config`. Key environment variables:
```bash
# Logging
LOG_LEVEL=info
LOG_CONSOLE=true
LOG_FILE=true
LOG_FILE_PATH=./logs
# Loki
LOKI_HOST=localhost
LOKI_PORT=3100
LOKI_BATCH_SIZE=1024
```
## Advanced Usage
### Child Loggers
```typescript
import { getLogger } from '@stock-bot/logger';
const parentLogger = getLogger('trading-service');
const orderLogger = parentLogger.child({
module: 'order-processing',
orderId: '12345'
});
orderLogger.info('Order validated'); // Will include parent context
```
### Custom Configuration
```typescript
import { getLogger } from '@stock-bot/logger';
// Uses standard getLogger with service-specific configuration
const logger = getLogger('custom-service');
```
### Sensitive Data Masking
```typescript
import { sanitizeMetadata, maskSensitiveData } from '@stock-bot/logger';
const unsafeData = {
username: 'john',
password: 'secret123',
apiKey: 'abc123def456'
};
const safeData = sanitizeMetadata(unsafeData);
// { username: 'john', password: '[REDACTED]', apiKey: '[REDACTED]' }
const message = maskSensitiveData('User API key: abc123def456');
// 'User API key: [API_KEY]'
```
### Log Throttling
```typescript
import { LogThrottle } from '@stock-bot/logger';
const throttle = new LogThrottle(10, 60000); // 10 logs per minute
if (throttle.shouldLog('error-key')) {
logger.error('This error will be throttled');
}
```
## Viewing Logs
### Grafana Dashboard
1. Start the monitoring stack: `docker-compose up grafana loki`
2. Open Grafana at http://localhost:3000
3. Use the "Stock Bot Logs" dashboard
4. Query logs with LogQL: `{service="your-service"}`
### Log Files
When file logging is enabled, logs are written to:
- `./logs/{service-name}-YYYY-MM-DD.log` - All logs
- `./logs/{service-name}-error-YYYY-MM-DD.log` - Error logs only
## Best Practices
1. **Use appropriate log levels**:
- `debug`: Detailed development information
- `info`: General operational messages
- `warn`: Potential issues
- `error`: Actual errors requiring attention
2. **Include context**: Always provide relevant metadata
```typescript
logger.info('Trade executed', { symbol, quantity, price, orderId });
```
3. **Use structured logging**: Avoid string concatenation
```typescript
// Good
logger.info('User logged in', { userId, ip, userAgent });
// Avoid
logger.info(`User ${userId} logged in from ${ip}`);
```
4. **Handle sensitive data**: Use sanitization utilities
```typescript
const safeMetadata = sanitizeMetadata(requestData);
logger.info('API request', safeMetadata);
```
5. **Use correlation IDs**: Track requests across services
```typescript
const logger = getLogger('service').child({
correlationId: req.headers['x-correlation-id']
});
```
## Integration with Services
To use in your service:
1. Add dependency to your service's `package.json`:
```json
{
"dependencies": {
"@stock-bot/logger": "*"
}
}
```
2. Update your service's `tsconfig.json` references:
```json
{
"references": [
{ "path": "../../../libs/logger" }
]
}
```
3. Import and use:
```typescript
import { getLogger } from '@stock-bot/logger';
const logger = getLogger('my-service');
```
## Performance Considerations
- Logs are batched and sent asynchronously to Loki
- File logging uses daily rotation to prevent large files
- Console logging can be disabled in production
- Use log throttling for high-frequency events
- Sensitive data is automatically masked
## Troubleshooting
### Logs not appearing in Loki
1. Check Loki connection:
```bash
curl http://localhost:3100/ready
```
2. Verify environment variables:
```bash
echo $LOKI_HOST $LOKI_PORT
```
3. Check container logs:
```bash
docker logs stock-bot-loki
```
### High memory usage
- Reduce `LOKI_BATCH_SIZE` if batching too many logs
- Disable file logging if not needed
### Missing logs
- Check log level configuration
- Verify service name matches expectations
- Ensure proper error handling around logger calls

View file

@ -1,18 +1,18 @@
# Logger library Bun configuration
[test]
# Configure coverage and test behavior
coverage = true
timeout = "30s"
# Configure test environment
preload = ["./test/setup.ts"]
# Environment variables for tests
[test.env]
NODE_ENV = "test"
LOG_LEVEL = "silent"
LOG_CONSOLE = "false"
LOG_FILE = "false"
LOKI_HOST = ""
LOKI_URL = ""
# Logger library Bun configuration
[test]
# Configure coverage and test behavior
coverage = true
timeout = "30s"
# Configure test environment
preload = ["./test/setup.ts"]
# Environment variables for tests
[test.env]
NODE_ENV = "test"
LOG_LEVEL = "silent"
LOG_CONSOLE = "false"
LOG_FILE = "false"
LOKI_HOST = ""
LOKI_URL = ""

View file

@ -1,18 +1,18 @@
/**
* @stock-bot/logger - Simplified logging library
*
* Main exports for the logger library
*/
// Core logger classes and functions
export {
Logger,
getLogger,
shutdownLoggers
} from './logger';
// Type definitions
export type { LogLevel, LogContext, LogMetadata } from './types';
// Default export
export { getLogger as default } from './logger';
/**
* @stock-bot/logger - Simplified logging library
*
* Main exports for the logger library
*/
// Core logger classes and functions
export {
Logger,
getLogger,
shutdownLoggers
} from './logger';
// Type definitions
export type { LogLevel, LogContext, LogMetadata } from './types';
// Default export
export { getLogger as default } from './logger';

View file

@ -1,271 +1,271 @@
/**
* Simplified Pino-based logger for Stock Bot platform
*
* Features:
* - High performance JSON logging with Pino
* - Console, file, and Loki transports
* - Structured logging with metadata
* - Service-specific context
*/
import pino from 'pino';
import { loggingConfig, lokiConfig } from '@stock-bot/config';
import type { LogLevel, LogContext, LogMetadata } from './types';
// Simple cache for logger instances
const loggerCache = new Map<string, pino.Logger>();
console.log('Logger cache initialized: ', loggingConfig.LOG_LEVEL);
/**
* Create transport configuration
*/
function createTransports(serviceName: string): any {
const targets: any[] = [];
// const isDev = loggingConfig.LOG_ENVIRONMENT === 'development';
// Console transport
if (loggingConfig.LOG_CONSOLE) {
targets.push({
target: 'pino-pretty',
level: loggingConfig.LOG_LEVEL, // Only show errors on console
options: {
colorize: true,
translateTime: 'yyyy-mm-dd HH:MM:ss.l',
messageFormat: '[{service}{childName}] {msg}',
singleLine: true,
hideObject: false,
ignore: 'pid,hostname,service,environment,version,childName',
errorLikeObjectKeys: ['err', 'error'],
errorProps: 'message,stack,name,code',
}
});
}
// File transport
if (loggingConfig.LOG_FILE) {
targets.push({
target: 'pino/file',
level: loggingConfig.LOG_LEVEL,
options: {
destination: `${loggingConfig.LOG_FILE_PATH}/${serviceName}.log`,
mkdir: true
}
});
}
// Loki transport
if (lokiConfig.LOKI_HOST) {
targets.push({
target: 'pino-loki',
level: loggingConfig.LOG_LEVEL,
options: {
host: lokiConfig.LOKI_URL || `http://${lokiConfig.LOKI_HOST}:${lokiConfig.LOKI_PORT}`,
labels: {
service: serviceName,
environment: lokiConfig.LOKI_ENVIRONMENT_LABEL
},
ignore: 'childName',
}
});
}
return targets.length > 0 ? { targets } : null;
}
/**
* Get or create pino logger
*/
function getPinoLogger(serviceName: string): pino.Logger {
if (!loggerCache.has(serviceName)) {
const transport = createTransports(serviceName);
const config: pino.LoggerOptions = {
level: loggingConfig.LOG_LEVEL,
base: {
service: serviceName,
environment: loggingConfig.LOG_ENVIRONMENT,
version: loggingConfig.LOG_SERVICE_VERSION
}
};
if (transport) {
config.transport = transport;
}
loggerCache.set(serviceName, pino(config));
}
return loggerCache.get(serviceName)!;
}
/**
* Simplified Logger class
*/
export class Logger {
private pino: pino.Logger;
private context: LogContext;
private serviceName: string;
private childName?: string;
constructor(serviceName: string, context: LogContext = {}) {
this.pino = getPinoLogger(serviceName);
this.context = context;
this.serviceName = serviceName;
}
/**
* Core log method
*/
private log(level: LogLevel, message: string | object, metadata?: LogMetadata): void {
const data = { ...this.context, ...metadata };
if (typeof message === 'string') {
(this.pino as any)[level](data, message);
} else {
(this.pino as any)[level]({ ...data, data: message }, 'Object logged');
}
}
// Simple log level methods
debug(message: string | object, metadata?: LogMetadata): void {
this.log('debug', message, metadata);
}
info(message: string | object, metadata?: LogMetadata): void {
this.log('info', message, metadata);
}
warn(message: string | object, metadata?: LogMetadata): void {
this.log('warn', message, metadata);
}
error(message: string | object, metadata?: LogMetadata & { error?: any } | unknown): void {
let data: any = {};
// Handle metadata parameter normalization
if (metadata instanceof Error) {
// Direct Error object as metadata
data = { error: metadata };
} else if (metadata !== null && typeof metadata === 'object') {
// Object metadata (including arrays, but not null)
data = { ...metadata };
} else if (metadata !== undefined) {
// Primitive values (string, number, boolean, etc.)
data = { metadata };
}
// Handle multiple error properties in metadata
const errorKeys = ['error', 'err', 'primaryError', 'secondaryError'];
errorKeys.forEach(key => {
if (data[key]) {
const normalizedKey = key === 'error' ? 'err' : `${key}_normalized`;
data[normalizedKey] = this.normalizeError(data[key]);
// Only delete the original 'error' key to maintain other error properties
if (key === 'error') {
delete data.error;
}
}
});
this.log('error', message, data);
}
/**
* Normalize any error type to a structured format
*/
private normalizeError(error: any): any {
if (error instanceof Error) {
return {
name: error.name,
message: error.message,
stack: error.stack,
};
}
if (error && typeof error === 'object') {
// Handle error-like objects
return {
name: error.name || 'UnknownError',
message: error.message || error.toString(),
...(error.stack && { stack: error.stack }),
...(error.code && { code: error.code }),
...(error.status && { status: error.status })
};
}
// Handle primitives (string, number, etc.)
return {
name: 'UnknownError',
message: String(error)
};
}
/**
* Create child logger with additional context
*/
child(serviceName: string, context?: LogContext): Logger {
// Create child logger that shares the same pino instance with additional context
const childLogger = Object.create(Logger.prototype);
childLogger.serviceName = this.serviceName;
childLogger.childName = serviceName;
childLogger.context = { ...this.context, ...context };
const childBindings = {
service: this.serviceName,
childName: ' -> ' + serviceName,
...(context || childLogger.context)
};
childLogger.pino = this.pino.child(childBindings);
return childLogger;
// }
// childLogger.pino = this.pino.child(context || childLogger.context); // Let pino handle level inheritance naturally
// return childLogger;
}
// Getters for service and context
getServiceName(): string {
return this.serviceName;
}
getChildName(): string | undefined {
return this.childName;
}
}
/**
* Main factory function
*/
export function getLogger(serviceName: string, context?: LogContext): Logger {
return new Logger(serviceName, context);
}
/**
* Gracefully shutdown all logger instances
* This should be called during application shutdown to ensure all logs are flushed
*/
export async function shutdownLoggers(): Promise<void> {
const flushPromises = Array.from(loggerCache.values()).map(logger => {
return new Promise<void>((resolve) => {
if (typeof logger.flush === 'function') {
logger.flush((err) => {
if (err) {
console.error('Logger flush error:', err);
}
resolve();
});
} else {
resolve();
}
});
});
try {
await Promise.allSettled(flushPromises);
console.log('All loggers flushed successfully');
} catch (error) {
console.error('Logger flush failed:', error);
} finally {
loggerCache.clear();
}
}
// Export types for convenience
export type { LogLevel, LogContext, LogMetadata } from './types';
/**
* Simplified Pino-based logger for Stock Bot platform
*
* Features:
* - High performance JSON logging with Pino
* - Console, file, and Loki transports
* - Structured logging with metadata
* - Service-specific context
*/
import pino from 'pino';
import { loggingConfig, lokiConfig } from '@stock-bot/config';
import type { LogLevel, LogContext, LogMetadata } from './types';
// Simple cache for logger instances
const loggerCache = new Map<string, pino.Logger>();
console.log('Logger cache initialized: ', loggingConfig.LOG_LEVEL);
/**
* Create transport configuration
*/
function createTransports(serviceName: string): any {
const targets: any[] = [];
// const isDev = loggingConfig.LOG_ENVIRONMENT === 'development';
// Console transport
if (loggingConfig.LOG_CONSOLE) {
targets.push({
target: 'pino-pretty',
level: loggingConfig.LOG_LEVEL, // Only show errors on console
options: {
colorize: true,
translateTime: 'yyyy-mm-dd HH:MM:ss.l',
messageFormat: '[{service}{childName}] {msg}',
singleLine: true,
hideObject: false,
ignore: 'pid,hostname,service,environment,version,childName',
errorLikeObjectKeys: ['err', 'error'],
errorProps: 'message,stack,name,code',
}
});
}
// File transport
if (loggingConfig.LOG_FILE) {
targets.push({
target: 'pino/file',
level: loggingConfig.LOG_LEVEL,
options: {
destination: `${loggingConfig.LOG_FILE_PATH}/${serviceName}.log`,
mkdir: true
}
});
}
// Loki transport
if (lokiConfig.LOKI_HOST) {
targets.push({
target: 'pino-loki',
level: loggingConfig.LOG_LEVEL,
options: {
host: lokiConfig.LOKI_URL || `http://${lokiConfig.LOKI_HOST}:${lokiConfig.LOKI_PORT}`,
labels: {
service: serviceName,
environment: lokiConfig.LOKI_ENVIRONMENT_LABEL
},
ignore: 'childName',
}
});
}
return targets.length > 0 ? { targets } : null;
}
/**
* Get or create pino logger
*/
function getPinoLogger(serviceName: string): pino.Logger {
if (!loggerCache.has(serviceName)) {
const transport = createTransports(serviceName);
const config: pino.LoggerOptions = {
level: loggingConfig.LOG_LEVEL,
base: {
service: serviceName,
environment: loggingConfig.LOG_ENVIRONMENT,
version: loggingConfig.LOG_SERVICE_VERSION
}
};
if (transport) {
config.transport = transport;
}
loggerCache.set(serviceName, pino(config));
}
return loggerCache.get(serviceName)!;
}
/**
* Simplified Logger class
*/
export class Logger {
private pino: pino.Logger;
private context: LogContext;
private serviceName: string;
private childName?: string;
constructor(serviceName: string, context: LogContext = {}) {
this.pino = getPinoLogger(serviceName);
this.context = context;
this.serviceName = serviceName;
}
/**
* Core log method
*/
private log(level: LogLevel, message: string | object, metadata?: LogMetadata): void {
const data = { ...this.context, ...metadata };
if (typeof message === 'string') {
(this.pino as any)[level](data, message);
} else {
(this.pino as any)[level]({ ...data, data: message }, 'Object logged');
}
}
// Simple log level methods
debug(message: string | object, metadata?: LogMetadata): void {
this.log('debug', message, metadata);
}
info(message: string | object, metadata?: LogMetadata): void {
this.log('info', message, metadata);
}
warn(message: string | object, metadata?: LogMetadata): void {
this.log('warn', message, metadata);
}
error(message: string | object, metadata?: LogMetadata & { error?: any } | unknown): void {
let data: any = {};
// Handle metadata parameter normalization
if (metadata instanceof Error) {
// Direct Error object as metadata
data = { error: metadata };
} else if (metadata !== null && typeof metadata === 'object') {
// Object metadata (including arrays, but not null)
data = { ...metadata };
} else if (metadata !== undefined) {
// Primitive values (string, number, boolean, etc.)
data = { metadata };
}
// Handle multiple error properties in metadata
const errorKeys = ['error', 'err', 'primaryError', 'secondaryError'];
errorKeys.forEach(key => {
if (data[key]) {
const normalizedKey = key === 'error' ? 'err' : `${key}_normalized`;
data[normalizedKey] = this.normalizeError(data[key]);
// Only delete the original 'error' key to maintain other error properties
if (key === 'error') {
delete data.error;
}
}
});
this.log('error', message, data);
}
/**
* Normalize any error type to a structured format
*/
private normalizeError(error: any): any {
if (error instanceof Error) {
return {
name: error.name,
message: error.message,
stack: error.stack,
};
}
if (error && typeof error === 'object') {
// Handle error-like objects
return {
name: error.name || 'UnknownError',
message: error.message || error.toString(),
...(error.stack && { stack: error.stack }),
...(error.code && { code: error.code }),
...(error.status && { status: error.status })
};
}
// Handle primitives (string, number, etc.)
return {
name: 'UnknownError',
message: String(error)
};
}
/**
* Create child logger with additional context
*/
child(serviceName: string, context?: LogContext): Logger {
// Create child logger that shares the same pino instance with additional context
const childLogger = Object.create(Logger.prototype);
childLogger.serviceName = this.serviceName;
childLogger.childName = serviceName;
childLogger.context = { ...this.context, ...context };
const childBindings = {
service: this.serviceName,
childName: ' -> ' + serviceName,
...(context || childLogger.context)
};
childLogger.pino = this.pino.child(childBindings);
return childLogger;
// }
// childLogger.pino = this.pino.child(context || childLogger.context); // Let pino handle level inheritance naturally
// return childLogger;
}
// Getters for service and context
getServiceName(): string {
return this.serviceName;
}
getChildName(): string | undefined {
return this.childName;
}
}
/**
* Main factory function
*/
export function getLogger(serviceName: string, context?: LogContext): Logger {
return new Logger(serviceName, context);
}
/**
* Gracefully shutdown all logger instances
* This should be called during application shutdown to ensure all logs are flushed
*/
export async function shutdownLoggers(): Promise<void> {
const flushPromises = Array.from(loggerCache.values()).map(logger => {
return new Promise<void>((resolve) => {
if (typeof logger.flush === 'function') {
logger.flush((err) => {
if (err) {
console.error('Logger flush error:', err);
}
resolve();
});
} else {
resolve();
}
});
});
try {
await Promise.allSettled(flushPromises);
console.log('All loggers flushed successfully');
} catch (error) {
console.error('Logger flush failed:', error);
} finally {
loggerCache.clear();
}
}
// Export types for convenience
export type { LogLevel, LogContext, LogMetadata } from './types';

View file

@ -1,16 +1,16 @@
/**
* Simplified type definitions for the logger library
*/
// Standard log levels (simplified to pino defaults)
export type LogLevel = 'debug' | 'info' | 'warn' | 'error';
// Context that persists across log calls
export interface LogContext {
[key: string]: any;
}
// Metadata for individual log entries
export interface LogMetadata {
[key: string]: any;
}
/**
* Simplified type definitions for the logger library
*/
// Standard log levels (simplified to pino defaults)
export type LogLevel = 'debug' | 'info' | 'warn' | 'error';
// Context that persists across log calls
export interface LogContext {
[key: string]: any;
}
// Metadata for individual log entries
export interface LogMetadata {
[key: string]: any;
}

View file

@ -1,200 +1,200 @@
/**
* Advanced Logger Tests
*
* Tests for advanced logger functionality including complex metadata handling,
* child loggers, and advanced error scenarios.
*/
import { describe, it, expect, beforeEach, afterEach } from 'bun:test';
import { Logger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Advanced Logger Features', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('advanced-features');
logger = testLoggerInstance.logger;
}); afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Complex Metadata Handling', () => {
it('should handle nested metadata objects', () => {
const complexMetadata = {
user: { id: '123', name: 'John Doe' },
session: { id: 'sess-456', timeout: 3600 },
request: { method: 'POST', path: '/api/test' }
};
logger.info('Complex operation', complexMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].user).toEqual({ id: '123', name: 'John Doe' });
expect(logs[0].session).toEqual({ id: 'sess-456', timeout: 3600 });
expect(logs[0].request).toEqual({ method: 'POST', path: '/api/test' });
});
it('should handle arrays in metadata', () => {
const arrayMetadata = {
tags: ['user', 'authentication', 'success'],
ids: [1, 2, 3, 4]
};
logger.info('Array metadata test', arrayMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].tags).toEqual(['user', 'authentication', 'success']);
expect(logs[0].ids).toEqual([1, 2, 3, 4]);
});
it('should handle null and undefined metadata values', () => {
const nullMetadata = {
nullValue: null,
undefinedValue: undefined,
emptyString: '',
zeroValue: 0
};
logger.info('Null metadata test', nullMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].nullValue).toBe(null);
expect(logs[0].emptyString).toBe('');
expect(logs[0].zeroValue).toBe(0);
});
});
describe('Child Logger Functionality', () => {
it('should create child logger with additional context', () => {
const childLogger = logger.child({
component: 'auth-service',
version: '1.2.3'
});
childLogger.info('Child logger message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].component).toBe('auth-service');
expect(logs[0].version).toBe('1.2.3');
expect(logs[0].msg).toBe('Child logger message');
});
it('should support nested child loggers', () => {
const childLogger = logger.child({ level1: 'parent' });
const grandChildLogger = childLogger.child({ level2: 'child' });
grandChildLogger.warn('Nested child message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level1).toBe('parent');
expect(logs[0].level2).toBe('child');
expect(logs[0].level).toBe('warn');
});
it('should merge child context with log metadata', () => {
const childLogger = logger.child({ service: 'api' });
childLogger.info('Request processed', {
requestId: 'req-789',
duration: 150
});
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].service).toBe('api');
expect(logs[0].requestId).toBe('req-789');
expect(logs[0].duration).toBe(150);
});
});
describe('Advanced Error Handling', () => {
it('should handle Error objects with custom properties', () => {
const customError = new Error('Custom error message');
(customError as any).code = 'ERR_CUSTOM';
(customError as any).statusCode = 500;
logger.error('Custom error occurred', { error: customError });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Custom error occurred');
});
it('should handle multiple errors in metadata', () => {
const error1 = new Error('First error');
const error2 = new Error('Second error');
logger.error('Multiple errors', {
primaryError: error1,
secondaryError: error2,
context: 'batch processing'
});
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].context).toBe('batch processing');
});
it('should handle error objects with circular references', () => {
const errorWithCircular: any = { name: 'CircularError', message: 'Circular reference error' };
// Create a simple circular reference
errorWithCircular.self = errorWithCircular;
// Should not throw when logging circular references
expect(() => {
logger.error('Circular error test', { error: errorWithCircular });
}).not.toThrow();
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
// Clean up circular reference to prevent memory issues
delete errorWithCircular.self;
});
});
describe('Performance and Edge Cases', () => {
it('should handle moderate metadata objects', () => {
const moderateMetadata: any = {};
for (let i = 0; i < 10; i++) {
moderateMetadata[`key${i}`] = `value${i}`;
}
logger.debug('Moderate metadata test', moderateMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].key0).toBe('value0');
expect(logs[0].key9).toBe('value9');
});
it('should handle special characters in messages', () => {
const specialMessage = 'Special chars: 🚀 ñ ü';
logger.info(specialMessage);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe(specialMessage);
});
it('should handle empty and whitespace-only messages', () => {
logger.info('');
logger.info(' ');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(2);
expect(logs[0].msg).toBe('');
expect(logs[1].msg).toBe(' ');
});
});
});
/**
* Advanced Logger Tests
*
* Tests for advanced logger functionality including complex metadata handling,
* child loggers, and advanced error scenarios.
*/
import { describe, it, expect, beforeEach, afterEach } from 'bun:test';
import { Logger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Advanced Logger Features', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('advanced-features');
logger = testLoggerInstance.logger;
}); afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Complex Metadata Handling', () => {
it('should handle nested metadata objects', () => {
const complexMetadata = {
user: { id: '123', name: 'John Doe' },
session: { id: 'sess-456', timeout: 3600 },
request: { method: 'POST', path: '/api/test' }
};
logger.info('Complex operation', complexMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].user).toEqual({ id: '123', name: 'John Doe' });
expect(logs[0].session).toEqual({ id: 'sess-456', timeout: 3600 });
expect(logs[0].request).toEqual({ method: 'POST', path: '/api/test' });
});
it('should handle arrays in metadata', () => {
const arrayMetadata = {
tags: ['user', 'authentication', 'success'],
ids: [1, 2, 3, 4]
};
logger.info('Array metadata test', arrayMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].tags).toEqual(['user', 'authentication', 'success']);
expect(logs[0].ids).toEqual([1, 2, 3, 4]);
});
it('should handle null and undefined metadata values', () => {
const nullMetadata = {
nullValue: null,
undefinedValue: undefined,
emptyString: '',
zeroValue: 0
};
logger.info('Null metadata test', nullMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].nullValue).toBe(null);
expect(logs[0].emptyString).toBe('');
expect(logs[0].zeroValue).toBe(0);
});
});
describe('Child Logger Functionality', () => {
it('should create child logger with additional context', () => {
const childLogger = logger.child({
component: 'auth-service',
version: '1.2.3'
});
childLogger.info('Child logger message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].component).toBe('auth-service');
expect(logs[0].version).toBe('1.2.3');
expect(logs[0].msg).toBe('Child logger message');
});
it('should support nested child loggers', () => {
const childLogger = logger.child({ level1: 'parent' });
const grandChildLogger = childLogger.child({ level2: 'child' });
grandChildLogger.warn('Nested child message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level1).toBe('parent');
expect(logs[0].level2).toBe('child');
expect(logs[0].level).toBe('warn');
});
it('should merge child context with log metadata', () => {
const childLogger = logger.child({ service: 'api' });
childLogger.info('Request processed', {
requestId: 'req-789',
duration: 150
});
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].service).toBe('api');
expect(logs[0].requestId).toBe('req-789');
expect(logs[0].duration).toBe(150);
});
});
describe('Advanced Error Handling', () => {
it('should handle Error objects with custom properties', () => {
const customError = new Error('Custom error message');
(customError as any).code = 'ERR_CUSTOM';
(customError as any).statusCode = 500;
logger.error('Custom error occurred', { error: customError });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Custom error occurred');
});
it('should handle multiple errors in metadata', () => {
const error1 = new Error('First error');
const error2 = new Error('Second error');
logger.error('Multiple errors', {
primaryError: error1,
secondaryError: error2,
context: 'batch processing'
});
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].context).toBe('batch processing');
});
it('should handle error objects with circular references', () => {
const errorWithCircular: any = { name: 'CircularError', message: 'Circular reference error' };
// Create a simple circular reference
errorWithCircular.self = errorWithCircular;
// Should not throw when logging circular references
expect(() => {
logger.error('Circular error test', { error: errorWithCircular });
}).not.toThrow();
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
// Clean up circular reference to prevent memory issues
delete errorWithCircular.self;
});
});
describe('Performance and Edge Cases', () => {
it('should handle moderate metadata objects', () => {
const moderateMetadata: any = {};
for (let i = 0; i < 10; i++) {
moderateMetadata[`key${i}`] = `value${i}`;
}
logger.debug('Moderate metadata test', moderateMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].key0).toBe('value0');
expect(logs[0].key9).toBe('value9');
});
it('should handle special characters in messages', () => {
const specialMessage = 'Special chars: 🚀 ñ ü';
logger.info(specialMessage);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe(specialMessage);
});
it('should handle empty and whitespace-only messages', () => {
logger.info('');
logger.info(' ');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(2);
expect(logs[0].msg).toBe('');
expect(logs[1].msg).toBe(' ');
});
});
});

View file

@ -1,169 +1,169 @@
/**
* Basic Logger Tests
*
* Tests for the core logger functionality and utilities.
*/
import { describe, it, expect, beforeEach, afterEach } from 'bun:test';
import { Logger, getLogger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Basic Logger Tests', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('utils-test');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Logger Factory Functions', () => {
it('should create logger with getLogger', () => {
expect(typeof getLogger).toBe('function');
// Test that getLogger doesn't throw
expect(() => {
const anotherTestLoggerInstance = loggerTestHelpers.createTestLogger('factory-test');
anotherTestLoggerInstance.logger.info('Factory test');
}).not.toThrow();
});
});
describe('Logger Methods', () => {
it('should have all required logging methods', () => {
expect(typeof logger.debug).toBe('function');
expect(typeof logger.info).toBe('function');
expect(typeof logger.warn).toBe('function');
expect(typeof logger.error).toBe('function');
expect(typeof logger.child).toBe('function');
});
it('should log with different message types', () => {
// String message
logger.info('String message');
// Object message
logger.info({ event: 'object_message', data: 'test' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(2);
expect(logs[0].msg).toBe('String message');
expect(logs[1].level).toBe('info');
});
it('should handle metadata correctly', () => {
const metadata = {
userId: 'user123',
sessionId: 'session456',
requestId: 'req789'
};
logger.info('Request processed', metadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].userId).toBe('user123');
expect(logs[0].sessionId).toBe('session456');
expect(logs[0].requestId).toBe('req789');
});
});
describe('Child Logger Functionality', () => {
it('should create child loggers with additional context', () => {
const childLogger = logger.child({
module: 'payment',
version: '1.0.0'
});
childLogger.info('Payment processed');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe('Payment processed');
});
it('should inherit service name in child loggers', () => {
const childLogger = logger.child({ operation: 'test' });
childLogger.info('Child operation');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].service).toBe('utils-test');
});
});
describe('Error Normalization', () => {
it('should handle Error objects', () => {
const error = new Error('Test error');
error.stack = 'Error stack trace';
logger.error('Error test', error);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
it('should handle error-like objects', () => {
const errorLike = {
name: 'ValidationError',
message: 'Invalid input',
code: 'VALIDATION_FAILED'
};
logger.error('Validation failed', { error: errorLike });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
it('should handle primitive error values', () => {
logger.error('Simple error', { error: 'Error string' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
});
describe('Service Context', () => {
it('should include service name in all logs', () => {
logger.debug('Debug message');
logger.info('Info message');
logger.warn('Warn message');
logger.error('Error message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(4);
logs.forEach(log => {
expect(log.service).toBe('utils-test');
});
});
it('should support different service names', () => {
const logger1Instance = loggerTestHelpers.createTestLogger('service-one');
const logger2Instance = loggerTestHelpers.createTestLogger('service-two');
logger1Instance.logger.info('Message from service one');
logger2Instance.logger.info('Message from service two');
// Since each logger instance has its own capture, we check them separately
// or combine them if that's the desired test logic.
// For this test, it seems we want to ensure they are separate.
const logs1 = logger1Instance.getCapturedLogs();
expect(logs1.length).toBe(1);
expect(logs1[0].service).toBe('service-one');
const logs2 = logger2Instance.getCapturedLogs();
expect(logs2.length).toBe(1);
expect(logs2[0].service).toBe('service-two');
});
});
});
/**
* Basic Logger Tests
*
* Tests for the core logger functionality and utilities.
*/
import { describe, it, expect, beforeEach, afterEach } from 'bun:test';
import { Logger, getLogger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Basic Logger Tests', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('utils-test');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Logger Factory Functions', () => {
it('should create logger with getLogger', () => {
expect(typeof getLogger).toBe('function');
// Test that getLogger doesn't throw
expect(() => {
const anotherTestLoggerInstance = loggerTestHelpers.createTestLogger('factory-test');
anotherTestLoggerInstance.logger.info('Factory test');
}).not.toThrow();
});
});
describe('Logger Methods', () => {
it('should have all required logging methods', () => {
expect(typeof logger.debug).toBe('function');
expect(typeof logger.info).toBe('function');
expect(typeof logger.warn).toBe('function');
expect(typeof logger.error).toBe('function');
expect(typeof logger.child).toBe('function');
});
it('should log with different message types', () => {
// String message
logger.info('String message');
// Object message
logger.info({ event: 'object_message', data: 'test' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(2);
expect(logs[0].msg).toBe('String message');
expect(logs[1].level).toBe('info');
});
it('should handle metadata correctly', () => {
const metadata = {
userId: 'user123',
sessionId: 'session456',
requestId: 'req789'
};
logger.info('Request processed', metadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].userId).toBe('user123');
expect(logs[0].sessionId).toBe('session456');
expect(logs[0].requestId).toBe('req789');
});
});
describe('Child Logger Functionality', () => {
it('should create child loggers with additional context', () => {
const childLogger = logger.child({
module: 'payment',
version: '1.0.0'
});
childLogger.info('Payment processed');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe('Payment processed');
});
it('should inherit service name in child loggers', () => {
const childLogger = logger.child({ operation: 'test' });
childLogger.info('Child operation');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].service).toBe('utils-test');
});
});
describe('Error Normalization', () => {
it('should handle Error objects', () => {
const error = new Error('Test error');
error.stack = 'Error stack trace';
logger.error('Error test', error);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
it('should handle error-like objects', () => {
const errorLike = {
name: 'ValidationError',
message: 'Invalid input',
code: 'VALIDATION_FAILED'
};
logger.error('Validation failed', { error: errorLike });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
it('should handle primitive error values', () => {
logger.error('Simple error', { error: 'Error string' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
});
describe('Service Context', () => {
it('should include service name in all logs', () => {
logger.debug('Debug message');
logger.info('Info message');
logger.warn('Warn message');
logger.error('Error message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(4);
logs.forEach(log => {
expect(log.service).toBe('utils-test');
});
});
it('should support different service names', () => {
const logger1Instance = loggerTestHelpers.createTestLogger('service-one');
const logger2Instance = loggerTestHelpers.createTestLogger('service-two');
logger1Instance.logger.info('Message from service one');
logger2Instance.logger.info('Message from service two');
// Since each logger instance has its own capture, we check them separately
// or combine them if that's the desired test logic.
// For this test, it seems we want to ensure they are separate.
const logs1 = logger1Instance.getCapturedLogs();
expect(logs1.length).toBe(1);
expect(logs1[0].service).toBe('service-one');
const logs2 = logger2Instance.getCapturedLogs();
expect(logs2.length).toBe(1);
expect(logs2[0].service).toBe('service-two');
});
});
});

View file

@ -1,192 +1,192 @@
/**
* Logger Integration Tests
*
* Tests the core functionality of the simplified @stock-bot/logger package.
*/
import { describe, it, expect, beforeEach, afterEach } from 'bun:test';
import {
Logger,
getLogger,
shutdownLoggers
} from '../src';
import { loggerTestHelpers } from './setup';
describe('Logger Integration Tests', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('integration-test');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Core Logger Functionality', () => {
it('should log messages at different levels', () => {
// Test multiple log levels
logger.debug('Debug message');
logger.info('Info message');
logger.warn('Warning message');
logger.error('Error message');
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify logs were captured
expect(logs.length).toBe(4);
expect(logs[0].level).toBe('debug');
expect(logs[0].msg).toBe('Debug message');
expect(logs[1].level).toBe('info');
expect(logs[1].msg).toBe('Info message');
expect(logs[2].level).toBe('warn');
expect(logs[2].msg).toBe('Warning message');
expect(logs[3].level).toBe('error');
expect(logs[3].msg).toBe('Error message');
});
it('should log objects as structured logs', () => {
// Log an object
logger.info('User logged in', { userId: '123', action: 'login' });
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify structured log
expect(logs.length).toBe(1);
expect(logs[0].userId).toBe('123');
expect(logs[0].action).toBe('login');
expect(logs[0].msg).toBe('User logged in');
});
it('should handle error objects in error logs', () => {
const testError = new Error('Test error message');
// Log error with error object
logger.error('Something went wrong', { error: testError });
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify error was logged
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Something went wrong');
});
it('should create child loggers with additional context', () => {
// Create a child logger with additional context
const childLogger = logger.child({
transactionId: 'tx-789',
operation: 'payment'
});
// Log with child logger
childLogger.info('Child logger test');
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify child logger logged something
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe('Child logger test');
});
});
describe('Factory Functions', () => {
it('should export factory functions', () => {
// Verify that the factory functions are exported and callable
expect(typeof getLogger).toBe('function');
});
it('should create different logger instances', () => {
const logger1Instance = loggerTestHelpers.createTestLogger('service-1');
const logger2Instance = loggerTestHelpers.createTestLogger('service-2');
logger1Instance.logger.info('Message from service 1');
logger2Instance.logger.info('Message from service 2');
const logs1 = logger1Instance.getCapturedLogs();
expect(logs1.length).toBe(1);
expect(logs1[0].service).toBe('service-1');
const logs2 = logger2Instance.getCapturedLogs();
expect(logs2.length).toBe(1);
expect(logs2[0].service).toBe('service-2');
});
});
describe('Error Handling', () => {
it('should normalize Error objects', () => {
const error = new Error('Test error');
error.stack = 'Error stack trace';
logger.error('Error occurred', error);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Error occurred');
});
it('should handle error-like objects', () => {
const errorLike = {
name: 'CustomError',
message: 'Custom error message',
code: 'ERR_CUSTOM'
};
logger.error('Custom error occurred', { error: errorLike });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Custom error occurred');
});
it('should handle primitive error values', () => {
logger.error('String error occurred', { error: 'Simple string error' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('String error occurred');
});
});
describe('Metadata Handling', () => {
it('should include metadata in logs', () => {
const metadata = {
requestId: 'req-123',
userId: 'user-456',
operation: 'data-fetch'
};
logger.info('Operation completed', metadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].requestId).toBe('req-123');
expect(logs[0].userId).toBe('user-456');
expect(logs[0].operation).toBe('data-fetch');
});
it('should handle object messages', () => {
const objectMessage = {
event: 'user_action',
action: 'login',
timestamp: Date.now()
};
logger.info(objectMessage);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('info');
});
});
});
/**
* Logger Integration Tests
*
* Tests the core functionality of the simplified @stock-bot/logger package.
*/
import { describe, it, expect, beforeEach, afterEach } from 'bun:test';
import {
Logger,
getLogger,
shutdownLoggers
} from '../src';
import { loggerTestHelpers } from './setup';
describe('Logger Integration Tests', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('integration-test');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Core Logger Functionality', () => {
it('should log messages at different levels', () => {
// Test multiple log levels
logger.debug('Debug message');
logger.info('Info message');
logger.warn('Warning message');
logger.error('Error message');
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify logs were captured
expect(logs.length).toBe(4);
expect(logs[0].level).toBe('debug');
expect(logs[0].msg).toBe('Debug message');
expect(logs[1].level).toBe('info');
expect(logs[1].msg).toBe('Info message');
expect(logs[2].level).toBe('warn');
expect(logs[2].msg).toBe('Warning message');
expect(logs[3].level).toBe('error');
expect(logs[3].msg).toBe('Error message');
});
it('should log objects as structured logs', () => {
// Log an object
logger.info('User logged in', { userId: '123', action: 'login' });
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify structured log
expect(logs.length).toBe(1);
expect(logs[0].userId).toBe('123');
expect(logs[0].action).toBe('login');
expect(logs[0].msg).toBe('User logged in');
});
it('should handle error objects in error logs', () => {
const testError = new Error('Test error message');
// Log error with error object
logger.error('Something went wrong', { error: testError });
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify error was logged
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Something went wrong');
});
it('should create child loggers with additional context', () => {
// Create a child logger with additional context
const childLogger = logger.child({
transactionId: 'tx-789',
operation: 'payment'
});
// Log with child logger
childLogger.info('Child logger test');
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify child logger logged something
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe('Child logger test');
});
});
describe('Factory Functions', () => {
it('should export factory functions', () => {
// Verify that the factory functions are exported and callable
expect(typeof getLogger).toBe('function');
});
it('should create different logger instances', () => {
const logger1Instance = loggerTestHelpers.createTestLogger('service-1');
const logger2Instance = loggerTestHelpers.createTestLogger('service-2');
logger1Instance.logger.info('Message from service 1');
logger2Instance.logger.info('Message from service 2');
const logs1 = logger1Instance.getCapturedLogs();
expect(logs1.length).toBe(1);
expect(logs1[0].service).toBe('service-1');
const logs2 = logger2Instance.getCapturedLogs();
expect(logs2.length).toBe(1);
expect(logs2[0].service).toBe('service-2');
});
});
describe('Error Handling', () => {
it('should normalize Error objects', () => {
const error = new Error('Test error');
error.stack = 'Error stack trace';
logger.error('Error occurred', error);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Error occurred');
});
it('should handle error-like objects', () => {
const errorLike = {
name: 'CustomError',
message: 'Custom error message',
code: 'ERR_CUSTOM'
};
logger.error('Custom error occurred', { error: errorLike });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Custom error occurred');
});
it('should handle primitive error values', () => {
logger.error('String error occurred', { error: 'Simple string error' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('String error occurred');
});
});
describe('Metadata Handling', () => {
it('should include metadata in logs', () => {
const metadata = {
requestId: 'req-123',
userId: 'user-456',
operation: 'data-fetch'
};
logger.info('Operation completed', metadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].requestId).toBe('req-123');
expect(logs[0].userId).toBe('user-456');
expect(logs[0].operation).toBe('data-fetch');
});
it('should handle object messages', () => {
const objectMessage = {
event: 'user_action',
action: 'login',
timestamp: Date.now()
};
logger.info(objectMessage);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('info');
});
});
});

View file

@ -1,137 +1,137 @@
/**
* Logger Test Setup
*
* Setup file specific to Logger library tests.
* Provides utilities and mocks for testing logging operations.
*/
import { Logger, LogMetadata, shutdownLoggers } from '../src';
import { afterAll, afterEach, beforeAll, beforeEach } from 'bun:test';
// Store original console methods
const originalConsole = {
log: console.log,
info: console.info,
warn: console.warn,
error: console.error,
debug: console.debug
};
// Create a test logger helper
export const loggerTestHelpers = {
/**
* Mock Loki transport
*/
mockLokiTransport: () => ({
on: () => {},
write: () => {}
}),
/**
* Create a mock Hono context for middleware tests
*/ createHonoContextMock: (options: any = {}) => {
// Default path and method
const path = options.path || '/test';
const method = options.method || 'GET';
// Create request headers
const headerEntries = Object.entries(options.req?.headers || {});
const headerMap = new Map(headerEntries);
const rawHeaders = new Headers();
headerEntries.forEach(([key, value]) => rawHeaders.set(key, value as string));
// Create request with standard properties needed for middleware
const req = {
method,
url: `http://localhost${path}`,
path,
raw: {
url: `http://localhost${path}`,
method,
headers: rawHeaders
},
query: {},
param: () => undefined,
header: (name: string) => rawHeaders.get(name.toLowerCase()),
headers: headerMap,
...options.req
};
// Create mock response
const res = {
status: 200,
statusText: 'OK',
body: null,
headers: new Map(),
clone: function() { return { ...this, text: async () => JSON.stringify(this.body) }; },
text: async () => JSON.stringify(res.body),
...options.res
};
// Create context with all required Hono methods
const c: any = {
req,
env: {},
res,
header: (name: string, value: string) => {
c.res.headers.set(name.toLowerCase(), value);
return c;
},
get: (key: string) => c[key],
set: (key: string, value: any) => { c[key] = value; return c; },
status: (code: number) => { c.res.status = code; return c; },
json: (body: any) => { c.res.body = body; return c; },
executionCtx: { waitUntil: (fn: Function) => { fn(); } }
};
return c;
},
/**
* Create a mock Next function for middleware tests
*/
createNextMock: () => {
return async () => {
// Do nothing, simulate middleware completion
return;
};
}
};
// Setup environment before tests
beforeAll(() => {
// Don't let real logs through during tests
console.log = () => {};
console.info = () => {};
console.warn = () => {};
console.error = () => {};
console.debug = () => {};
// Override NODE_ENV for tests
process.env.NODE_ENV = 'test';
// Disable real logging during tests
process.env.LOG_LEVEL = 'silent';
process.env.LOG_CONSOLE = 'false';
process.env.LOG_FILE = 'false';
// Mock Loki config to prevent real connections
process.env.LOKI_HOST = '';
process.env.LOKI_URL = '';
});
// Clean up after each test
afterEach(async () => {
// Clear logger cache to prevent state pollution between tests
await shutdownLoggers();
});
// Restore everything after tests
afterAll(() => {
console.log = originalConsole.log;
console.info = originalConsole.info;
console.warn = originalConsole.warn;
console.error = originalConsole.error;
console.debug = originalConsole.debug;
});
/**
* Logger Test Setup
*
* Setup file specific to Logger library tests.
* Provides utilities and mocks for testing logging operations.
*/
import { Logger, LogMetadata, shutdownLoggers } from '../src';
import { afterAll, afterEach, beforeAll, beforeEach } from 'bun:test';
// Store original console methods
const originalConsole = {
log: console.log,
info: console.info,
warn: console.warn,
error: console.error,
debug: console.debug
};
// Create a test logger helper
export const loggerTestHelpers = {
/**
* Mock Loki transport
*/
mockLokiTransport: () => ({
on: () => {},
write: () => {}
}),
/**
* Create a mock Hono context for middleware tests
*/ createHonoContextMock: (options: any = {}) => {
// Default path and method
const path = options.path || '/test';
const method = options.method || 'GET';
// Create request headers
const headerEntries = Object.entries(options.req?.headers || {});
const headerMap = new Map(headerEntries);
const rawHeaders = new Headers();
headerEntries.forEach(([key, value]) => rawHeaders.set(key, value as string));
// Create request with standard properties needed for middleware
const req = {
method,
url: `http://localhost${path}`,
path,
raw: {
url: `http://localhost${path}`,
method,
headers: rawHeaders
},
query: {},
param: () => undefined,
header: (name: string) => rawHeaders.get(name.toLowerCase()),
headers: headerMap,
...options.req
};
// Create mock response
const res = {
status: 200,
statusText: 'OK',
body: null,
headers: new Map(),
clone: function() { return { ...this, text: async () => JSON.stringify(this.body) }; },
text: async () => JSON.stringify(res.body),
...options.res
};
// Create context with all required Hono methods
const c: any = {
req,
env: {},
res,
header: (name: string, value: string) => {
c.res.headers.set(name.toLowerCase(), value);
return c;
},
get: (key: string) => c[key],
set: (key: string, value: any) => { c[key] = value; return c; },
status: (code: number) => { c.res.status = code; return c; },
json: (body: any) => { c.res.body = body; return c; },
executionCtx: { waitUntil: (fn: Function) => { fn(); } }
};
return c;
},
/**
* Create a mock Next function for middleware tests
*/
createNextMock: () => {
return async () => {
// Do nothing, simulate middleware completion
return;
};
}
};
// Setup environment before tests
beforeAll(() => {
// Don't let real logs through during tests
console.log = () => {};
console.info = () => {};
console.warn = () => {};
console.error = () => {};
console.debug = () => {};
// Override NODE_ENV for tests
process.env.NODE_ENV = 'test';
// Disable real logging during tests
process.env.LOG_LEVEL = 'silent';
process.env.LOG_CONSOLE = 'false';
process.env.LOG_FILE = 'false';
// Mock Loki config to prevent real connections
process.env.LOKI_HOST = '';
process.env.LOKI_URL = '';
});
// Clean up after each test
afterEach(async () => {
// Clear logger cache to prevent state pollution between tests
await shutdownLoggers();
});
// Restore everything after tests
afterAll(() => {
console.log = originalConsole.log;
console.info = originalConsole.info;
console.warn = originalConsole.warn;
console.error = originalConsole.error;
console.debug = originalConsole.debug;
});

View file

@ -1,12 +1,12 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../config" }
]
}
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../config" }
]
}

View file

@ -1,10 +1,10 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}

View file

@ -1,72 +1,72 @@
# MongoDB Client Library
A comprehensive MongoDB client library for the Stock Bot trading platform, designed for handling document storage, raw data, and unstructured content.
## Features
- **Connection Management**: Robust connection pooling and failover
- **Schema Validation**: Built-in validation using Zod schemas
- **Type Safety**: Full TypeScript support with typed collections
- **Error Handling**: Comprehensive error handling and retry logic
- **Health Monitoring**: Connection health monitoring and metrics
- **Transactions**: Support for multi-document transactions
- **Aggregation**: Helper methods for complex aggregation pipelines
## Usage
```typescript
import { MongoDBClient } from '@stock-bot/mongodb-client';
// Initialize client
const mongoClient = new MongoDBClient();
await mongoClient.connect();
// Get a typed collection
const collection = mongoClient.getCollection('sentiment_data');
// Insert document
await collection.insertOne({
symbol: 'AAPL',
sentiment: 'positive',
source: 'reddit',
timestamp: new Date()
});
// Query with aggregation
const results = await collection.aggregate([
{ $match: { symbol: 'AAPL' } },
{ $group: { _id: '$sentiment', count: { $sum: 1 } } }
]);
```
## Collections
The client provides typed access to the following collections:
- **sentiment_data**: Social media sentiment analysis
- **raw_documents**: Unprocessed documents and content
- **news_articles**: Financial news and articles
- **sec_filings**: SEC filing documents
- **earnings_transcripts**: Earnings call transcripts
- **analyst_reports**: Research reports and analysis
## Configuration
Configure using environment variables:
```env
MONGODB_HOST=localhost
MONGODB_PORT=27017
MONGODB_DATABASE=trading_documents
MONGODB_USERNAME=trading_admin
MONGODB_PASSWORD=your_password
```
## Health Monitoring
The client includes built-in health monitoring:
```typescript
const health = await mongoClient.getHealth();
console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy'
```
# MongoDB Client Library
A comprehensive MongoDB client library for the Stock Bot trading platform, designed for handling document storage, raw data, and unstructured content.
## Features
- **Connection Management**: Robust connection pooling and failover
- **Schema Validation**: Built-in validation using Zod schemas
- **Type Safety**: Full TypeScript support with typed collections
- **Error Handling**: Comprehensive error handling and retry logic
- **Health Monitoring**: Connection health monitoring and metrics
- **Transactions**: Support for multi-document transactions
- **Aggregation**: Helper methods for complex aggregation pipelines
## Usage
```typescript
import { MongoDBClient } from '@stock-bot/mongodb-client';
// Initialize client
const mongoClient = new MongoDBClient();
await mongoClient.connect();
// Get a typed collection
const collection = mongoClient.getCollection('sentiment_data');
// Insert document
await collection.insertOne({
symbol: 'AAPL',
sentiment: 'positive',
source: 'reddit',
timestamp: new Date()
});
// Query with aggregation
const results = await collection.aggregate([
{ $match: { symbol: 'AAPL' } },
{ $group: { _id: '$sentiment', count: { $sum: 1 } } }
]);
```
## Collections
The client provides typed access to the following collections:
- **sentiment_data**: Social media sentiment analysis
- **raw_documents**: Unprocessed documents and content
- **news_articles**: Financial news and articles
- **sec_filings**: SEC filing documents
- **earnings_transcripts**: Earnings call transcripts
- **analyst_reports**: Research reports and analysis
## Configuration
Configure using environment variables:
```env
MONGODB_HOST=localhost
MONGODB_PORT=27017
MONGODB_DATABASE=trading_documents
MONGODB_USERNAME=trading_admin
MONGODB_PASSWORD=your_password
```
## Health Monitoring
The client includes built-in health monitoring:
```typescript
const health = await mongoClient.getHealth();
console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy'
```

View file

@ -1,51 +1,51 @@
{
"name": "@stock-bot/mongodb-client",
"version": "1.0.0",
"description": "MongoDB client library for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/config": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"@types/mongodb": "^4.0.7",
"mongodb": "^6.17.0",
"yup": "^1.6.1"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"keywords": [
"mongodb",
"database",
"client",
"stock-bot"
],
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
],
"paths": {
"*": ["node_modules/*", "../../node_modules/*"]
}
}
{
"name": "@stock-bot/mongodb-client",
"version": "1.0.0",
"description": "MongoDB client library for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/config": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"@types/mongodb": "^4.0.7",
"mongodb": "^6.17.0",
"yup": "^1.6.1"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"keywords": [
"mongodb",
"database",
"client",
"stock-bot"
],
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
],
"paths": {
"*": ["node_modules/*", "../../node_modules/*"]
}
}

View file

@ -1,247 +1,247 @@
import type { Document } from 'mongodb';
import type { MongoDBClient } from './client';
import type { CollectionNames } from './types';
/**
* MongoDB Aggregation Builder
*
* Provides a fluent interface for building MongoDB aggregation pipelines
*/
export class MongoDBAggregationBuilder {
private pipeline: any[] = [];
private readonly client: MongoDBClient;
private collection: CollectionNames | null = null;
constructor(client: MongoDBClient) {
this.client = client;
}
/**
* Set the collection to aggregate on
*/
from(collection: CollectionNames): this {
this.collection = collection;
return this;
}
/**
* Add a match stage
*/
match(filter: any): this {
this.pipeline.push({ $match: filter });
return this;
}
/**
* Add a group stage
*/
group(groupBy: any): this {
this.pipeline.push({ $group: groupBy });
return this;
}
/**
* Add a sort stage
*/
sort(sortBy: any): this {
this.pipeline.push({ $sort: sortBy });
return this;
}
/**
* Add a limit stage
*/
limit(count: number): this {
this.pipeline.push({ $limit: count });
return this;
}
/**
* Add a skip stage
*/
skip(count: number): this {
this.pipeline.push({ $skip: count });
return this;
}
/**
* Add a project stage
*/
project(projection: any): this {
this.pipeline.push({ $project: projection });
return this;
}
/**
* Add an unwind stage
*/
unwind(field: string, options?: any): this {
this.pipeline.push({
$unwind: options ? { path: field, ...options } : field
});
return this;
}
/**
* Add a lookup stage (join)
*/
lookup(from: string, localField: string, foreignField: string, as: string): this {
this.pipeline.push({
$lookup: {
from,
localField,
foreignField,
as
}
});
return this;
}
/**
* Add a custom stage
*/
addStage(stage: any): this {
this.pipeline.push(stage);
return this;
}
/**
* Execute the aggregation pipeline
*/
async execute<T extends Document = Document>(): Promise<T[]> {
if (!this.collection) {
throw new Error('Collection not specified. Use .from() to set the collection.');
}
const collection = this.client.getCollection(this.collection);
return await collection.aggregate<T>(this.pipeline).toArray();
}
/**
* Get the pipeline array
*/
getPipeline(): any[] {
return [...this.pipeline];
}
/**
* Reset the pipeline
*/
reset(): this {
this.pipeline = [];
this.collection = null;
return this;
}
// Convenience methods for common aggregations
/**
* Sentiment analysis aggregation
*/
sentimentAnalysis(symbol?: string, timeframe?: { start: Date; end: Date }): this {
this.from('sentiment_data');
const matchConditions: any = {};
if (symbol) matchConditions.symbol = symbol;
if (timeframe) {
matchConditions.timestamp = {
$gte: timeframe.start,
$lte: timeframe.end
};
}
if (Object.keys(matchConditions).length > 0) {
this.match(matchConditions);
}
return this.group({
_id: {
symbol: '$symbol',
sentiment: '$sentiment_label'
},
count: { $sum: 1 },
avgScore: { $avg: '$sentiment_score' },
avgConfidence: { $avg: '$confidence' }
});
}
/**
* News article aggregation by publication
*/
newsByPublication(symbols?: string[]): this {
this.from('news_articles');
if (symbols && symbols.length > 0) {
this.match({ symbols: { $in: symbols } });
}
return this.group({
_id: '$publication',
articleCount: { $sum: 1 },
symbols: { $addToSet: '$symbols' },
avgSentiment: { $avg: '$sentiment_score' },
latestArticle: { $max: '$published_date' }
});
}
/**
* SEC filings by company
*/
secFilingsByCompany(filingTypes?: string[]): this {
this.from('sec_filings');
if (filingTypes && filingTypes.length > 0) {
this.match({ filing_type: { $in: filingTypes } });
}
return this.group({
_id: {
cik: '$cik',
company: '$company_name'
},
filingCount: { $sum: 1 },
filingTypes: { $addToSet: '$filing_type' },
latestFiling: { $max: '$filing_date' },
symbols: { $addToSet: '$symbols' }
});
}
/**
* Document processing status summary
*/
processingStatusSummary(collection: CollectionNames): this {
this.from(collection);
return this.group({
_id: '$processing_status',
count: { $sum: 1 },
avgSizeBytes: { $avg: '$size_bytes' },
oldestDocument: { $min: '$created_at' },
newestDocument: { $max: '$created_at' }
});
}
/**
* Time-based aggregation (daily/hourly counts)
*/
timeBasedCounts(
collection: CollectionNames,
dateField: string = 'created_at',
interval: 'hour' | 'day' | 'week' | 'month' = 'day'
): this {
this.from(collection);
const dateFormat = {
hour: { $dateToString: { format: '%Y-%m-%d %H:00:00', date: `$${dateField}` } },
day: { $dateToString: { format: '%Y-%m-%d', date: `$${dateField}` } },
week: { $dateToString: { format: '%Y-W%V', date: `$${dateField}` } },
month: { $dateToString: { format: '%Y-%m', date: `$${dateField}` } }
};
return this.group({
_id: dateFormat[interval],
count: { $sum: 1 },
firstDocument: { $min: `$${dateField}` },
lastDocument: { $max: `$${dateField}` }
}).sort({ _id: 1 });
}
}
import type { Document } from 'mongodb';
import type { MongoDBClient } from './client';
import type { CollectionNames } from './types';
/**
* MongoDB Aggregation Builder
*
* Provides a fluent interface for building MongoDB aggregation pipelines
*/
export class MongoDBAggregationBuilder {
private pipeline: any[] = [];
private readonly client: MongoDBClient;
private collection: CollectionNames | null = null;
constructor(client: MongoDBClient) {
this.client = client;
}
/**
* Set the collection to aggregate on
*/
from(collection: CollectionNames): this {
this.collection = collection;
return this;
}
/**
* Add a match stage
*/
match(filter: any): this {
this.pipeline.push({ $match: filter });
return this;
}
/**
* Add a group stage
*/
group(groupBy: any): this {
this.pipeline.push({ $group: groupBy });
return this;
}
/**
* Add a sort stage
*/
sort(sortBy: any): this {
this.pipeline.push({ $sort: sortBy });
return this;
}
/**
* Add a limit stage
*/
limit(count: number): this {
this.pipeline.push({ $limit: count });
return this;
}
/**
* Add a skip stage
*/
skip(count: number): this {
this.pipeline.push({ $skip: count });
return this;
}
/**
* Add a project stage
*/
project(projection: any): this {
this.pipeline.push({ $project: projection });
return this;
}
/**
* Add an unwind stage
*/
unwind(field: string, options?: any): this {
this.pipeline.push({
$unwind: options ? { path: field, ...options } : field
});
return this;
}
/**
* Add a lookup stage (join)
*/
lookup(from: string, localField: string, foreignField: string, as: string): this {
this.pipeline.push({
$lookup: {
from,
localField,
foreignField,
as
}
});
return this;
}
/**
* Add a custom stage
*/
addStage(stage: any): this {
this.pipeline.push(stage);
return this;
}
/**
* Execute the aggregation pipeline
*/
async execute<T extends Document = Document>(): Promise<T[]> {
if (!this.collection) {
throw new Error('Collection not specified. Use .from() to set the collection.');
}
const collection = this.client.getCollection(this.collection);
return await collection.aggregate<T>(this.pipeline).toArray();
}
/**
* Get the pipeline array
*/
getPipeline(): any[] {
return [...this.pipeline];
}
/**
* Reset the pipeline
*/
reset(): this {
this.pipeline = [];
this.collection = null;
return this;
}
// Convenience methods for common aggregations
/**
* Sentiment analysis aggregation
*/
sentimentAnalysis(symbol?: string, timeframe?: { start: Date; end: Date }): this {
this.from('sentiment_data');
const matchConditions: any = {};
if (symbol) matchConditions.symbol = symbol;
if (timeframe) {
matchConditions.timestamp = {
$gte: timeframe.start,
$lte: timeframe.end
};
}
if (Object.keys(matchConditions).length > 0) {
this.match(matchConditions);
}
return this.group({
_id: {
symbol: '$symbol',
sentiment: '$sentiment_label'
},
count: { $sum: 1 },
avgScore: { $avg: '$sentiment_score' },
avgConfidence: { $avg: '$confidence' }
});
}
/**
* News article aggregation by publication
*/
newsByPublication(symbols?: string[]): this {
this.from('news_articles');
if (symbols && symbols.length > 0) {
this.match({ symbols: { $in: symbols } });
}
return this.group({
_id: '$publication',
articleCount: { $sum: 1 },
symbols: { $addToSet: '$symbols' },
avgSentiment: { $avg: '$sentiment_score' },
latestArticle: { $max: '$published_date' }
});
}
/**
* SEC filings by company
*/
secFilingsByCompany(filingTypes?: string[]): this {
this.from('sec_filings');
if (filingTypes && filingTypes.length > 0) {
this.match({ filing_type: { $in: filingTypes } });
}
return this.group({
_id: {
cik: '$cik',
company: '$company_name'
},
filingCount: { $sum: 1 },
filingTypes: { $addToSet: '$filing_type' },
latestFiling: { $max: '$filing_date' },
symbols: { $addToSet: '$symbols' }
});
}
/**
* Document processing status summary
*/
processingStatusSummary(collection: CollectionNames): this {
this.from(collection);
return this.group({
_id: '$processing_status',
count: { $sum: 1 },
avgSizeBytes: { $avg: '$size_bytes' },
oldestDocument: { $min: '$created_at' },
newestDocument: { $max: '$created_at' }
});
}
/**
* Time-based aggregation (daily/hourly counts)
*/
timeBasedCounts(
collection: CollectionNames,
dateField: string = 'created_at',
interval: 'hour' | 'day' | 'week' | 'month' = 'day'
): this {
this.from(collection);
const dateFormat = {
hour: { $dateToString: { format: '%Y-%m-%d %H:00:00', date: `$${dateField}` } },
day: { $dateToString: { format: '%Y-%m-%d', date: `$${dateField}` } },
week: { $dateToString: { format: '%Y-W%V', date: `$${dateField}` } },
month: { $dateToString: { format: '%Y-%m', date: `$${dateField}` } }
};
return this.group({
_id: dateFormat[interval],
count: { $sum: 1 },
firstDocument: { $min: `$${dateField}` },
lastDocument: { $max: `$${dateField}` }
}).sort({ _id: 1 });
}
}

View file

@ -1,379 +1,379 @@
import { MongoClient, Db, Collection, MongoClientOptions, Document, WithId, OptionalUnlessRequiredId } from 'mongodb';
import { mongodbConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import type {
MongoDBClientConfig,
MongoDBConnectionOptions,
CollectionNames,
DocumentBase,
SentimentData,
RawDocument,
NewsArticle,
SecFiling,
EarningsTranscript,
AnalystReport
} from './types';
import { MongoDBHealthMonitor } from './health';
import { schemaMap } from './schemas';
import * as yup from 'yup';
/**
* MongoDB Client for Stock Bot
*
* Provides type-safe access to MongoDB collections with built-in
* health monitoring, connection pooling, and schema validation.
*/
export class MongoDBClient {
private client: MongoClient | null = null;
private db: Db | null = null;
private readonly config: MongoDBClientConfig;
private readonly options: MongoDBConnectionOptions;
private readonly logger: ReturnType<typeof getLogger>;
private readonly healthMonitor: MongoDBHealthMonitor;
private isConnected = false;
constructor(
config?: Partial<MongoDBClientConfig>,
options?: MongoDBConnectionOptions
) {
this.config = this.buildConfig(config);
this.options = {
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options
};
this.logger = getLogger('mongodb-client');
this.healthMonitor = new MongoDBHealthMonitor(this);
}
/**
* Connect to MongoDB
*/
async connect(): Promise<void> {
if (this.isConnected && this.client) {
return;
}
const uri = this.buildConnectionUri();
const clientOptions = this.buildClientOptions();
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(`Connecting to MongoDB (attempt ${attempt}/${this.options.retryAttempts})...`);
this.client = new MongoClient(uri, clientOptions);
await this.client.connect();
// Test the connection
await this.client.db(this.config.database).admin().ping();
this.db = this.client.db(this.config.database);
this.isConnected = true;
this.logger.info('Successfully connected to MongoDB');
// Start health monitoring
this.healthMonitor.start();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`MongoDB connection attempt ${attempt} failed:`, error);
if (this.client) {
await this.client.close();
this.client = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(`Failed to connect to MongoDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`);
}
/**
* Disconnect from MongoDB
*/
async disconnect(): Promise<void> {
if (!this.client) {
return;
}
try {
this.healthMonitor.stop();
await this.client.close();
this.isConnected = false;
this.client = null;
this.db = null;
this.logger.info('Disconnected from MongoDB');
} catch (error) {
this.logger.error('Error disconnecting from MongoDB:', error);
throw error;
}
}
/**
* Get a typed collection
*/
getCollection<T extends DocumentBase>(name: CollectionNames): Collection<T> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
return this.db.collection<T>(name);
}
/**
* Insert a document with validation
*/
async insertOne<T extends DocumentBase>(
collectionName: CollectionNames,
document: Omit<T, '_id' | 'created_at' | 'updated_at'> & Partial<Pick<T, 'created_at' | 'updated_at'>>
): Promise<T> {
const collection = this.getCollection<T>(collectionName);
// Add timestamps
const now = new Date();
const docWithTimestamps = {
...document,
created_at: document.created_at || now,
updated_at: now
} as T; // Validate document if schema exists
if (collectionName in schemaMap) {
try {
(schemaMap as any)[collectionName].validateSync(docWithTimestamps);
} catch (error) {
if (error instanceof yup.ValidationError) {
this.logger.error(`Document validation failed for ${collectionName}:`, error.errors);
throw new Error(`Document validation failed: ${error.errors?.map(e => e).join(', ')}`);
}
throw error;
}
}const result = await collection.insertOne(docWithTimestamps as OptionalUnlessRequiredId<T>);
return { ...docWithTimestamps, _id: result.insertedId } as T;
}
/**
* Update a document with validation
*/
async updateOne<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any,
update: Partial<T>
): Promise<boolean> {
const collection = this.getCollection<T>(collectionName);
// Add updated timestamp
const updateWithTimestamp = {
...update,
updated_at: new Date()
};
const result = await collection.updateOne(filter, { $set: updateWithTimestamp });
return result.modifiedCount > 0;
}
/**
* Find documents with optional validation
*/
async find<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any = {},
options: any = {}
): Promise<T[]> {
const collection = this.getCollection<T>(collectionName);
return await collection.find(filter, options).toArray() as T[];
}
/**
* Find one document
*/
async findOne<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any
): Promise<T | null> {
const collection = this.getCollection<T>(collectionName);
return await collection.findOne(filter) as T | null;
}
/**
* Aggregate with type safety
*/
async aggregate<T extends DocumentBase>(
collectionName: CollectionNames,
pipeline: any[]
): Promise<T[]> {
const collection = this.getCollection<T>(collectionName);
return await collection.aggregate<T>(pipeline).toArray();
}
/**
* Count documents
*/
async countDocuments(
collectionName: CollectionNames,
filter: any = {}
): Promise<number> {
const collection = this.getCollection(collectionName);
return await collection.countDocuments(filter);
}
/**
* Create indexes for better performance
*/
async createIndexes(): Promise<void> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
try {
// Sentiment data indexes
await this.db.collection('sentiment_data').createIndexes([
{ key: { symbol: 1, timestamp: -1 } },
{ key: { sentiment_label: 1 } },
{ key: { source_type: 1 } },
{ key: { created_at: -1 } }
]);
// News articles indexes
await this.db.collection('news_articles').createIndexes([
{ key: { symbols: 1, published_date: -1 } },
{ key: { publication: 1 } },
{ key: { categories: 1 } },
{ key: { created_at: -1 } }
]);
// SEC filings indexes
await this.db.collection('sec_filings').createIndexes([
{ key: { symbols: 1, filing_date: -1 } },
{ key: { filing_type: 1 } },
{ key: { cik: 1 } },
{ key: { created_at: -1 } }
]); // Raw documents indexes
await this.db.collection('raw_documents').createIndex(
{ content_hash: 1 },
{ unique: true }
);
await this.db.collection('raw_documents').createIndexes([
{ key: { processing_status: 1 } },
{ key: { document_type: 1 } },
{ key: { created_at: -1 } }
]);
this.logger.info('MongoDB indexes created successfully');
} catch (error) {
this.logger.error('Error creating MongoDB indexes:', error);
throw error;
}
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
return await this.db.stats();
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.client;
}
/**
* Get the underlying MongoDB client
*/
get mongoClient(): MongoClient | null {
return this.client;
}
/**
* Get the database instance
*/
get database(): Db | null {
return this.db;
}
private buildConfig(config?: Partial<MongoDBClientConfig>): MongoDBClientConfig {
return {
host: config?.host || mongodbConfig.MONGODB_HOST,
port: config?.port || mongodbConfig.MONGODB_PORT,
database: config?.database || mongodbConfig.MONGODB_DATABASE,
username: config?.username || mongodbConfig.MONGODB_USERNAME,
password: config?.password || mongodbConfig.MONGODB_PASSWORD,
authSource: config?.authSource || mongodbConfig.MONGODB_AUTH_SOURCE,
uri: config?.uri || mongodbConfig.MONGODB_URI,
poolSettings: {
maxPoolSize: mongodbConfig.MONGODB_MAX_POOL_SIZE,
minPoolSize: mongodbConfig.MONGODB_MIN_POOL_SIZE,
maxIdleTime: mongodbConfig.MONGODB_MAX_IDLE_TIME,
...config?.poolSettings
},
timeouts: {
connectTimeout: mongodbConfig.MONGODB_CONNECT_TIMEOUT,
socketTimeout: mongodbConfig.MONGODB_SOCKET_TIMEOUT,
serverSelectionTimeout: mongodbConfig.MONGODB_SERVER_SELECTION_TIMEOUT,
...config?.timeouts
},
tls: {
enabled: mongodbConfig.MONGODB_TLS,
insecure: mongodbConfig.MONGODB_TLS_INSECURE,
caFile: mongodbConfig.MONGODB_TLS_CA_FILE,
...config?.tls
},
options: {
retryWrites: mongodbConfig.MONGODB_RETRY_WRITES,
journal: mongodbConfig.MONGODB_JOURNAL,
readPreference: mongodbConfig.MONGODB_READ_PREFERENCE as any,
writeConcern: mongodbConfig.MONGODB_WRITE_CONCERN,
...config?.options
}
};
}
private buildConnectionUri(): string {
if (this.config.uri) {
return this.config.uri;
}
const { host, port, username, password, database, authSource } = this.config;
const auth = username && password ? `${username}:${password}@` : '';
const authDb = authSource ? `?authSource=${authSource}` : '';
return `mongodb://${auth}${host}:${port}/${database}${authDb}`;
}
private buildClientOptions(): MongoClientOptions {
return {
maxPoolSize: this.config.poolSettings?.maxPoolSize,
minPoolSize: this.config.poolSettings?.minPoolSize,
maxIdleTimeMS: this.config.poolSettings?.maxIdleTime,
connectTimeoutMS: this.config.timeouts?.connectTimeout,
socketTimeoutMS: this.config.timeouts?.socketTimeout,
serverSelectionTimeoutMS: this.config.timeouts?.serverSelectionTimeout,
retryWrites: this.config.options?.retryWrites,
journal: this.config.options?.journal,
readPreference: this.config.options?.readPreference, writeConcern: this.config.options?.writeConcern ? {
w: this.config.options.writeConcern === 'majority'
? 'majority' as const
: parseInt(this.config.options.writeConcern, 10) || 1
} : undefined,
tls: this.config.tls?.enabled,
tlsInsecure: this.config.tls?.insecure,
tlsCAFile: this.config.tls?.caFile
};
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}
import { MongoClient, Db, Collection, MongoClientOptions, Document, WithId, OptionalUnlessRequiredId } from 'mongodb';
import { mongodbConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import type {
MongoDBClientConfig,
MongoDBConnectionOptions,
CollectionNames,
DocumentBase,
SentimentData,
RawDocument,
NewsArticle,
SecFiling,
EarningsTranscript,
AnalystReport
} from './types';
import { MongoDBHealthMonitor } from './health';
import { schemaMap } from './schemas';
import * as yup from 'yup';
/**
* MongoDB Client for Stock Bot
*
* Provides type-safe access to MongoDB collections with built-in
* health monitoring, connection pooling, and schema validation.
*/
export class MongoDBClient {
private client: MongoClient | null = null;
private db: Db | null = null;
private readonly config: MongoDBClientConfig;
private readonly options: MongoDBConnectionOptions;
private readonly logger: ReturnType<typeof getLogger>;
private readonly healthMonitor: MongoDBHealthMonitor;
private isConnected = false;
constructor(
config?: Partial<MongoDBClientConfig>,
options?: MongoDBConnectionOptions
) {
this.config = this.buildConfig(config);
this.options = {
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options
};
this.logger = getLogger('mongodb-client');
this.healthMonitor = new MongoDBHealthMonitor(this);
}
/**
* Connect to MongoDB
*/
async connect(): Promise<void> {
if (this.isConnected && this.client) {
return;
}
const uri = this.buildConnectionUri();
const clientOptions = this.buildClientOptions();
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(`Connecting to MongoDB (attempt ${attempt}/${this.options.retryAttempts})...`);
this.client = new MongoClient(uri, clientOptions);
await this.client.connect();
// Test the connection
await this.client.db(this.config.database).admin().ping();
this.db = this.client.db(this.config.database);
this.isConnected = true;
this.logger.info('Successfully connected to MongoDB');
// Start health monitoring
this.healthMonitor.start();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`MongoDB connection attempt ${attempt} failed:`, error);
if (this.client) {
await this.client.close();
this.client = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(`Failed to connect to MongoDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`);
}
/**
* Disconnect from MongoDB
*/
async disconnect(): Promise<void> {
if (!this.client) {
return;
}
try {
this.healthMonitor.stop();
await this.client.close();
this.isConnected = false;
this.client = null;
this.db = null;
this.logger.info('Disconnected from MongoDB');
} catch (error) {
this.logger.error('Error disconnecting from MongoDB:', error);
throw error;
}
}
/**
* Get a typed collection
*/
getCollection<T extends DocumentBase>(name: CollectionNames): Collection<T> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
return this.db.collection<T>(name);
}
/**
* Insert a document with validation
*/
async insertOne<T extends DocumentBase>(
collectionName: CollectionNames,
document: Omit<T, '_id' | 'created_at' | 'updated_at'> & Partial<Pick<T, 'created_at' | 'updated_at'>>
): Promise<T> {
const collection = this.getCollection<T>(collectionName);
// Add timestamps
const now = new Date();
const docWithTimestamps = {
...document,
created_at: document.created_at || now,
updated_at: now
} as T; // Validate document if schema exists
if (collectionName in schemaMap) {
try {
(schemaMap as any)[collectionName].validateSync(docWithTimestamps);
} catch (error) {
if (error instanceof yup.ValidationError) {
this.logger.error(`Document validation failed for ${collectionName}:`, error.errors);
throw new Error(`Document validation failed: ${error.errors?.map(e => e).join(', ')}`);
}
throw error;
}
}const result = await collection.insertOne(docWithTimestamps as OptionalUnlessRequiredId<T>);
return { ...docWithTimestamps, _id: result.insertedId } as T;
}
/**
* Update a document with validation
*/
async updateOne<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any,
update: Partial<T>
): Promise<boolean> {
const collection = this.getCollection<T>(collectionName);
// Add updated timestamp
const updateWithTimestamp = {
...update,
updated_at: new Date()
};
const result = await collection.updateOne(filter, { $set: updateWithTimestamp });
return result.modifiedCount > 0;
}
/**
* Find documents with optional validation
*/
async find<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any = {},
options: any = {}
): Promise<T[]> {
const collection = this.getCollection<T>(collectionName);
return await collection.find(filter, options).toArray() as T[];
}
/**
* Find one document
*/
async findOne<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any
): Promise<T | null> {
const collection = this.getCollection<T>(collectionName);
return await collection.findOne(filter) as T | null;
}
/**
* Aggregate with type safety
*/
async aggregate<T extends DocumentBase>(
collectionName: CollectionNames,
pipeline: any[]
): Promise<T[]> {
const collection = this.getCollection<T>(collectionName);
return await collection.aggregate<T>(pipeline).toArray();
}
/**
* Count documents
*/
async countDocuments(
collectionName: CollectionNames,
filter: any = {}
): Promise<number> {
const collection = this.getCollection(collectionName);
return await collection.countDocuments(filter);
}
/**
* Create indexes for better performance
*/
async createIndexes(): Promise<void> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
try {
// Sentiment data indexes
await this.db.collection('sentiment_data').createIndexes([
{ key: { symbol: 1, timestamp: -1 } },
{ key: { sentiment_label: 1 } },
{ key: { source_type: 1 } },
{ key: { created_at: -1 } }
]);
// News articles indexes
await this.db.collection('news_articles').createIndexes([
{ key: { symbols: 1, published_date: -1 } },
{ key: { publication: 1 } },
{ key: { categories: 1 } },
{ key: { created_at: -1 } }
]);
// SEC filings indexes
await this.db.collection('sec_filings').createIndexes([
{ key: { symbols: 1, filing_date: -1 } },
{ key: { filing_type: 1 } },
{ key: { cik: 1 } },
{ key: { created_at: -1 } }
]); // Raw documents indexes
await this.db.collection('raw_documents').createIndex(
{ content_hash: 1 },
{ unique: true }
);
await this.db.collection('raw_documents').createIndexes([
{ key: { processing_status: 1 } },
{ key: { document_type: 1 } },
{ key: { created_at: -1 } }
]);
this.logger.info('MongoDB indexes created successfully');
} catch (error) {
this.logger.error('Error creating MongoDB indexes:', error);
throw error;
}
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
return await this.db.stats();
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.client;
}
/**
* Get the underlying MongoDB client
*/
get mongoClient(): MongoClient | null {
return this.client;
}
/**
* Get the database instance
*/
get database(): Db | null {
return this.db;
}
private buildConfig(config?: Partial<MongoDBClientConfig>): MongoDBClientConfig {
return {
host: config?.host || mongodbConfig.MONGODB_HOST,
port: config?.port || mongodbConfig.MONGODB_PORT,
database: config?.database || mongodbConfig.MONGODB_DATABASE,
username: config?.username || mongodbConfig.MONGODB_USERNAME,
password: config?.password || mongodbConfig.MONGODB_PASSWORD,
authSource: config?.authSource || mongodbConfig.MONGODB_AUTH_SOURCE,
uri: config?.uri || mongodbConfig.MONGODB_URI,
poolSettings: {
maxPoolSize: mongodbConfig.MONGODB_MAX_POOL_SIZE,
minPoolSize: mongodbConfig.MONGODB_MIN_POOL_SIZE,
maxIdleTime: mongodbConfig.MONGODB_MAX_IDLE_TIME,
...config?.poolSettings
},
timeouts: {
connectTimeout: mongodbConfig.MONGODB_CONNECT_TIMEOUT,
socketTimeout: mongodbConfig.MONGODB_SOCKET_TIMEOUT,
serverSelectionTimeout: mongodbConfig.MONGODB_SERVER_SELECTION_TIMEOUT,
...config?.timeouts
},
tls: {
enabled: mongodbConfig.MONGODB_TLS,
insecure: mongodbConfig.MONGODB_TLS_INSECURE,
caFile: mongodbConfig.MONGODB_TLS_CA_FILE,
...config?.tls
},
options: {
retryWrites: mongodbConfig.MONGODB_RETRY_WRITES,
journal: mongodbConfig.MONGODB_JOURNAL,
readPreference: mongodbConfig.MONGODB_READ_PREFERENCE as any,
writeConcern: mongodbConfig.MONGODB_WRITE_CONCERN,
...config?.options
}
};
}
private buildConnectionUri(): string {
if (this.config.uri) {
return this.config.uri;
}
const { host, port, username, password, database, authSource } = this.config;
const auth = username && password ? `${username}:${password}@` : '';
const authDb = authSource ? `?authSource=${authSource}` : '';
return `mongodb://${auth}${host}:${port}/${database}${authDb}`;
}
private buildClientOptions(): MongoClientOptions {
return {
maxPoolSize: this.config.poolSettings?.maxPoolSize,
minPoolSize: this.config.poolSettings?.minPoolSize,
maxIdleTimeMS: this.config.poolSettings?.maxIdleTime,
connectTimeoutMS: this.config.timeouts?.connectTimeout,
socketTimeoutMS: this.config.timeouts?.socketTimeout,
serverSelectionTimeoutMS: this.config.timeouts?.serverSelectionTimeout,
retryWrites: this.config.options?.retryWrites,
journal: this.config.options?.journal,
readPreference: this.config.options?.readPreference, writeConcern: this.config.options?.writeConcern ? {
w: this.config.options.writeConcern === 'majority'
? 'majority' as const
: parseInt(this.config.options.writeConcern, 10) || 1
} : undefined,
tls: this.config.tls?.enabled,
tlsInsecure: this.config.tls?.insecure,
tlsCAFile: this.config.tls?.caFile
};
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}

View file

@ -1,66 +1,66 @@
import { MongoDBClient } from './client';
import { mongodbConfig } from '@stock-bot/config';
import type { MongoDBClientConfig, MongoDBConnectionOptions } from './types';
/**
* Factory function to create a MongoDB client instance
*/
export function createMongoDBClient(
config?: Partial<MongoDBClientConfig>,
options?: MongoDBConnectionOptions
): MongoDBClient {
return new MongoDBClient(config, options);
}
/**
* Create a MongoDB client with default configuration
*/
export function createDefaultMongoDBClient(): MongoDBClient {
const config: Partial<MongoDBClientConfig> = {
host: mongodbConfig.MONGODB_HOST,
port: mongodbConfig.MONGODB_PORT,
database: mongodbConfig.MONGODB_DATABASE,
username: mongodbConfig.MONGODB_USERNAME,
password: mongodbConfig.MONGODB_PASSWORD,
uri: mongodbConfig.MONGODB_URI
};
return new MongoDBClient(config);
}
/**
* Singleton MongoDB client instance
*/
let defaultClient: MongoDBClient | null = null;
/**
* Get or create the default MongoDB client instance
*/
export function getMongoDBClient(): MongoDBClient {
if (!defaultClient) {
defaultClient = createDefaultMongoDBClient();
}
return defaultClient;
}
/**
* Connect to MongoDB using the default client
*/
export async function connectMongoDB(): Promise<MongoDBClient> {
const client = getMongoDBClient();
if (!client.connected) {
await client.connect();
await client.createIndexes();
}
return client;
}
/**
* Disconnect from MongoDB
*/
export async function disconnectMongoDB(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}
import { MongoDBClient } from './client';
import { mongodbConfig } from '@stock-bot/config';
import type { MongoDBClientConfig, MongoDBConnectionOptions } from './types';
/**
* Factory function to create a MongoDB client instance
*/
export function createMongoDBClient(
config?: Partial<MongoDBClientConfig>,
options?: MongoDBConnectionOptions
): MongoDBClient {
return new MongoDBClient(config, options);
}
/**
* Create a MongoDB client with default configuration
*/
export function createDefaultMongoDBClient(): MongoDBClient {
const config: Partial<MongoDBClientConfig> = {
host: mongodbConfig.MONGODB_HOST,
port: mongodbConfig.MONGODB_PORT,
database: mongodbConfig.MONGODB_DATABASE,
username: mongodbConfig.MONGODB_USERNAME,
password: mongodbConfig.MONGODB_PASSWORD,
uri: mongodbConfig.MONGODB_URI
};
return new MongoDBClient(config);
}
/**
* Singleton MongoDB client instance
*/
let defaultClient: MongoDBClient | null = null;
/**
* Get or create the default MongoDB client instance
*/
export function getMongoDBClient(): MongoDBClient {
if (!defaultClient) {
defaultClient = createDefaultMongoDBClient();
}
return defaultClient;
}
/**
* Connect to MongoDB using the default client
*/
export async function connectMongoDB(): Promise<MongoDBClient> {
const client = getMongoDBClient();
if (!client.connected) {
await client.connect();
await client.createIndexes();
}
return client;
}
/**
* Disconnect from MongoDB
*/
export async function disconnectMongoDB(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}

View file

@ -1,226 +1,226 @@
import { getLogger } from '@stock-bot/logger';
import type { MongoDBClient } from './client';
import type { MongoDBHealthCheck, MongoDBHealthStatus, MongoDBMetrics } from './types';
/**
* MongoDB Health Monitor
*
* Monitors MongoDB connection health and provides metrics
*/
export class MongoDBHealthMonitor {
private readonly client: MongoDBClient;
private readonly logger: ReturnType<typeof getLogger>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private metrics: MongoDBMetrics;
private lastHealthCheck: MongoDBHealthCheck | null = null;
constructor(client: MongoDBClient) {
this.client = client;
this.logger = getLogger('mongodb-health-monitor');
this.metrics = {
operationsPerSecond: 0,
averageLatency: 0,
errorRate: 0,
connectionPoolUtilization: 0,
documentsProcessed: 0
};
}
/**
* Start health monitoring
*/
start(intervalMs: number = 30000): void {
if (this.healthCheckInterval) {
this.stop();
}
this.logger.info(`Starting MongoDB health monitoring (interval: ${intervalMs}ms)`);
this.healthCheckInterval = setInterval(async () => {
try {
await this.performHealthCheck();
} catch (error) {
this.logger.error('Health check failed:', error);
}
}, intervalMs);
// Perform initial health check
this.performHealthCheck().catch(error => {
this.logger.error('Initial health check failed:', error);
});
}
/**
* Stop health monitoring
*/
stop(): void {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
this.logger.info('Stopped MongoDB health monitoring');
}
}
/**
* Get current health status
*/
async getHealth(): Promise<MongoDBHealthCheck> {
if (!this.lastHealthCheck) {
await this.performHealthCheck();
}
return this.lastHealthCheck!;
}
/**
* Get current metrics
*/
getMetrics(): MongoDBMetrics {
return { ...this.metrics };
}
/**
* Perform a health check
*/
private async performHealthCheck(): Promise<void> {
const startTime = Date.now();
const errors: string[] = [];
let status: MongoDBHealthStatus = 'healthy';
try {
if (!this.client.connected) {
errors.push('MongoDB client not connected');
status = 'unhealthy';
} else {
// Test basic connectivity
const mongoClient = this.client.mongoClient;
const db = this.client.database;
if (!mongoClient || !db) {
errors.push('MongoDB client or database not available');
status = 'unhealthy';
} else {
// Ping the database
await db.admin().ping();
// Get server status for metrics
try {
const serverStatus = await db.admin().serverStatus();
this.updateMetricsFromServerStatus(serverStatus);
// Check connection pool status
const poolStats = this.getConnectionPoolStats(serverStatus);
if (poolStats.utilization > 0.9) {
errors.push('High connection pool utilization');
status = status === 'healthy' ? 'degraded' : status;
}
// Check for high latency
const latency = Date.now() - startTime;
if (latency > 1000) {
errors.push(`High latency: ${latency}ms`);
status = status === 'healthy' ? 'degraded' : status;
}
} catch (statusError) {
errors.push(`Failed to get server status: ${(statusError as Error).message}`);
status = 'degraded';
}
}
}
} catch (error) {
errors.push(`Health check failed: ${(error as Error).message}`);
status = 'unhealthy';
}
const latency = Date.now() - startTime;
// Get connection stats
const connectionStats = this.getConnectionStats();
this.lastHealthCheck = {
status,
timestamp: new Date(),
latency,
connections: connectionStats,
errors: errors.length > 0 ? errors : undefined
};
// Log health status changes
if (status !== 'healthy') {
this.logger.warn(`MongoDB health status: ${status}`, { errors, latency });
} else {
this.logger.debug(`MongoDB health check passed (${latency}ms)`);
}
}
/**
* Update metrics from MongoDB server status
*/
private updateMetricsFromServerStatus(serverStatus: any): void {
try {
const opcounters = serverStatus.opcounters || {};
const connections = serverStatus.connections || {};
const dur = serverStatus.dur || {};
// Calculate operations per second (approximate)
const totalOps = Object.values(opcounters).reduce((sum: number, count: any) => sum + (count || 0), 0);
this.metrics.operationsPerSecond = totalOps;
// Connection pool utilization
if (connections.current && connections.available) {
const total = connections.current + connections.available;
this.metrics.connectionPoolUtilization = connections.current / total;
}
// Average latency (from durability stats if available)
if (dur.timeMS) {
this.metrics.averageLatency = dur.timeMS.dt || 0;
} } catch (error) {
this.logger.debug('Error parsing server status for metrics:', error as any);
}
}
/**
* Get connection pool statistics
*/
private getConnectionPoolStats(serverStatus: any): { utilization: number; active: number; available: number } {
const connections = serverStatus.connections || {};
const active = connections.current || 0;
const available = connections.available || 0;
const total = active + available;
return {
utilization: total > 0 ? active / total : 0,
active,
available
};
}
/**
* Get connection statistics
*/
private getConnectionStats(): { active: number; available: number; total: number } {
// This would ideally come from the MongoDB driver's connection pool
// For now, we'll return estimated values
return {
active: 1,
available: 9,
total: 10
};
}
/**
* Update error rate metric
*/
updateErrorRate(errorCount: number, totalOperations: number): void {
this.metrics.errorRate = totalOperations > 0 ? errorCount / totalOperations : 0;
}
/**
* Update documents processed metric
*/
updateDocumentsProcessed(count: number): void {
this.metrics.documentsProcessed += count;
}
}
import { getLogger } from '@stock-bot/logger';
import type { MongoDBClient } from './client';
import type { MongoDBHealthCheck, MongoDBHealthStatus, MongoDBMetrics } from './types';
/**
* MongoDB Health Monitor
*
* Monitors MongoDB connection health and provides metrics
*/
export class MongoDBHealthMonitor {
private readonly client: MongoDBClient;
private readonly logger: ReturnType<typeof getLogger>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private metrics: MongoDBMetrics;
private lastHealthCheck: MongoDBHealthCheck | null = null;
constructor(client: MongoDBClient) {
this.client = client;
this.logger = getLogger('mongodb-health-monitor');
this.metrics = {
operationsPerSecond: 0,
averageLatency: 0,
errorRate: 0,
connectionPoolUtilization: 0,
documentsProcessed: 0
};
}
/**
* Start health monitoring
*/
start(intervalMs: number = 30000): void {
if (this.healthCheckInterval) {
this.stop();
}
this.logger.info(`Starting MongoDB health monitoring (interval: ${intervalMs}ms)`);
this.healthCheckInterval = setInterval(async () => {
try {
await this.performHealthCheck();
} catch (error) {
this.logger.error('Health check failed:', error);
}
}, intervalMs);
// Perform initial health check
this.performHealthCheck().catch(error => {
this.logger.error('Initial health check failed:', error);
});
}
/**
* Stop health monitoring
*/
stop(): void {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
this.logger.info('Stopped MongoDB health monitoring');
}
}
/**
* Get current health status
*/
async getHealth(): Promise<MongoDBHealthCheck> {
if (!this.lastHealthCheck) {
await this.performHealthCheck();
}
return this.lastHealthCheck!;
}
/**
* Get current metrics
*/
getMetrics(): MongoDBMetrics {
return { ...this.metrics };
}
/**
* Perform a health check
*/
private async performHealthCheck(): Promise<void> {
const startTime = Date.now();
const errors: string[] = [];
let status: MongoDBHealthStatus = 'healthy';
try {
if (!this.client.connected) {
errors.push('MongoDB client not connected');
status = 'unhealthy';
} else {
// Test basic connectivity
const mongoClient = this.client.mongoClient;
const db = this.client.database;
if (!mongoClient || !db) {
errors.push('MongoDB client or database not available');
status = 'unhealthy';
} else {
// Ping the database
await db.admin().ping();
// Get server status for metrics
try {
const serverStatus = await db.admin().serverStatus();
this.updateMetricsFromServerStatus(serverStatus);
// Check connection pool status
const poolStats = this.getConnectionPoolStats(serverStatus);
if (poolStats.utilization > 0.9) {
errors.push('High connection pool utilization');
status = status === 'healthy' ? 'degraded' : status;
}
// Check for high latency
const latency = Date.now() - startTime;
if (latency > 1000) {
errors.push(`High latency: ${latency}ms`);
status = status === 'healthy' ? 'degraded' : status;
}
} catch (statusError) {
errors.push(`Failed to get server status: ${(statusError as Error).message}`);
status = 'degraded';
}
}
}
} catch (error) {
errors.push(`Health check failed: ${(error as Error).message}`);
status = 'unhealthy';
}
const latency = Date.now() - startTime;
// Get connection stats
const connectionStats = this.getConnectionStats();
this.lastHealthCheck = {
status,
timestamp: new Date(),
latency,
connections: connectionStats,
errors: errors.length > 0 ? errors : undefined
};
// Log health status changes
if (status !== 'healthy') {
this.logger.warn(`MongoDB health status: ${status}`, { errors, latency });
} else {
this.logger.debug(`MongoDB health check passed (${latency}ms)`);
}
}
/**
* Update metrics from MongoDB server status
*/
private updateMetricsFromServerStatus(serverStatus: any): void {
try {
const opcounters = serverStatus.opcounters || {};
const connections = serverStatus.connections || {};
const dur = serverStatus.dur || {};
// Calculate operations per second (approximate)
const totalOps = Object.values(opcounters).reduce((sum: number, count: any) => sum + (count || 0), 0);
this.metrics.operationsPerSecond = totalOps;
// Connection pool utilization
if (connections.current && connections.available) {
const total = connections.current + connections.available;
this.metrics.connectionPoolUtilization = connections.current / total;
}
// Average latency (from durability stats if available)
if (dur.timeMS) {
this.metrics.averageLatency = dur.timeMS.dt || 0;
} } catch (error) {
this.logger.debug('Error parsing server status for metrics:', error as any);
}
}
/**
* Get connection pool statistics
*/
private getConnectionPoolStats(serverStatus: any): { utilization: number; active: number; available: number } {
const connections = serverStatus.connections || {};
const active = connections.current || 0;
const available = connections.available || 0;
const total = active + available;
return {
utilization: total > 0 ? active / total : 0,
active,
available
};
}
/**
* Get connection statistics
*/
private getConnectionStats(): { active: number; available: number; total: number } {
// This would ideally come from the MongoDB driver's connection pool
// For now, we'll return estimated values
return {
active: 1,
available: 9,
total: 10
};
}
/**
* Update error rate metric
*/
updateErrorRate(errorCount: number, totalOperations: number): void {
this.metrics.errorRate = totalOperations > 0 ? errorCount / totalOperations : 0;
}
/**
* Update documents processed metric
*/
updateDocumentsProcessed(count: number): void {
this.metrics.documentsProcessed += count;
}
}

View file

@ -1,40 +1,40 @@
/**
* MongoDB Client Library for Stock Bot
*
* Provides type-safe MongoDB access for document storage, sentiment data,
* and raw content processing.
*/
export { MongoDBClient } from './client';
export { MongoDBHealthMonitor } from './health';
export { MongoDBTransactionManager } from './transactions';
export { MongoDBAggregationBuilder } from './aggregation';
// Types
export type {
MongoDBClientConfig,
MongoDBConnectionOptions,
MongoDBHealthStatus,
MongoDBMetrics,
CollectionNames,
DocumentBase,
SentimentData,
RawDocument,
NewsArticle,
SecFiling,
EarningsTranscript,
AnalystReport
} from './types';
// Schemas
export {
sentimentDataSchema,
rawDocumentSchema,
newsArticleSchema,
secFilingSchema,
earningsTranscriptSchema,
analystReportSchema
} from './schemas';
// Utils
export { createMongoDBClient } from './factory';
/**
* MongoDB Client Library for Stock Bot
*
* Provides type-safe MongoDB access for document storage, sentiment data,
* and raw content processing.
*/
export { MongoDBClient } from './client';
export { MongoDBHealthMonitor } from './health';
export { MongoDBTransactionManager } from './transactions';
export { MongoDBAggregationBuilder } from './aggregation';
// Types
export type {
MongoDBClientConfig,
MongoDBConnectionOptions,
MongoDBHealthStatus,
MongoDBMetrics,
CollectionNames,
DocumentBase,
SentimentData,
RawDocument,
NewsArticle,
SecFiling,
EarningsTranscript,
AnalystReport
} from './types';
// Schemas
export {
sentimentDataSchema,
rawDocumentSchema,
newsArticleSchema,
secFilingSchema,
earningsTranscriptSchema,
analystReportSchema
} from './schemas';
// Utils
export { createMongoDBClient } from './factory';

View file

@ -1,132 +1,132 @@
import * as yup from 'yup';
/**
* Yup Schemas for MongoDB Document Validation
*/
// Base schema for all documents
export const documentBaseSchema = yup.object({
_id: yup.mixed().optional(),
created_at: yup.date().required(),
updated_at: yup.date().required(),
source: yup.string().required(),
metadata: yup.object().optional(),
});
// Sentiment Data Schema
export const sentimentDataSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
sentiment_score: yup.number().min(-1).max(1).required(),
sentiment_label: yup.string().oneOf(['positive', 'negative', 'neutral']).required(),
confidence: yup.number().min(0).max(1).required(),
text: yup.string().min(1).required(),
source_type: yup.string().oneOf(['reddit', 'twitter', 'news', 'forums']).required(),
source_id: yup.string().required(),
timestamp: yup.date().required(),
processed_at: yup.date().required(),
language: yup.string().default('en'),
keywords: yup.array(yup.string()).required(),
entities: yup.array(yup.object({
name: yup.string().required(),
type: yup.string().required(),
confidence: yup.number().min(0).max(1).required(),
})).required(),
});
// Raw Document Schema
export const rawDocumentSchema = documentBaseSchema.shape({
document_type: yup.string().oneOf(['html', 'pdf', 'text', 'json', 'xml']).required(),
content: yup.string().required(),
content_hash: yup.string().required(),
url: yup.string().url().optional(),
title: yup.string().optional(),
author: yup.string().optional(),
published_date: yup.date().optional(),
extracted_text: yup.string().optional(),
processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(),
size_bytes: yup.number().positive().required(),
language: yup.string().optional(),
});
// News Article Schema
export const newsArticleSchema = documentBaseSchema.shape({
headline: yup.string().min(1).required(),
content: yup.string().min(1).required(),
summary: yup.string().optional(),
author: yup.string().required(),
publication: yup.string().required(),
published_date: yup.date().required(),
url: yup.string().url().required(),
symbols: yup.array(yup.string()).required(),
categories: yup.array(yup.string()).required(),
sentiment_score: yup.number().min(-1).max(1).optional(),
relevance_score: yup.number().min(0).max(1).optional(),
image_url: yup.string().url().optional(),
tags: yup.array(yup.string()).required(),
});
// SEC Filing Schema
export const secFilingSchema = documentBaseSchema.shape({
cik: yup.string().required(),
accession_number: yup.string().required(),
filing_type: yup.string().required(),
company_name: yup.string().required(),
symbols: yup.array(yup.string()).required(),
filing_date: yup.date().required(),
period_end_date: yup.date().required(),
url: yup.string().url().required(),
content: yup.string().required(),
extracted_data: yup.object().optional(),
financial_statements: yup.array(yup.object({
statement_type: yup.string().required(),
data: yup.object().required(),
})).optional(),
processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(),
});
// Earnings Transcript Schema
export const earningsTranscriptSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
company_name: yup.string().required(),
quarter: yup.string().required(),
year: yup.number().min(2000).max(3000).required(),
call_date: yup.date().required(),
transcript: yup.string().required(),
participants: yup.array(yup.object({
name: yup.string().required(),
title: yup.string().required(),
type: yup.string().oneOf(['executive', 'analyst']).required(),
})).required(),
key_topics: yup.array(yup.string()).required(),
sentiment_analysis: yup.object({
overall_sentiment: yup.number().min(-1).max(1).required(),
topic_sentiments: yup.object().required(),
}).optional(),
financial_highlights: yup.object().optional(),
});
// Analyst Report Schema
export const analystReportSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
analyst_firm: yup.string().required(),
analyst_name: yup.string().required(),
report_title: yup.string().required(),
report_date: yup.date().required(),
rating: yup.string().oneOf(['buy', 'hold', 'sell', 'strong_buy', 'strong_sell']).required(),
price_target: yup.number().positive().optional(),
previous_rating: yup.string().optional(),
content: yup.string().required(),
summary: yup.string().required(),
key_points: yup.array(yup.string()).required(),
financial_projections: yup.object().optional(),
});
// Schema mapping for collections
export const schemaMap = {
sentiment_data: sentimentDataSchema,
raw_documents: rawDocumentSchema,
news_articles: newsArticleSchema,
sec_filings: secFilingSchema,
earnings_transcripts: earningsTranscriptSchema,
analyst_reports: analystReportSchema,
} as const;
import * as yup from 'yup';
/**
* Yup Schemas for MongoDB Document Validation
*/
// Base schema for all documents
export const documentBaseSchema = yup.object({
_id: yup.mixed().optional(),
created_at: yup.date().required(),
updated_at: yup.date().required(),
source: yup.string().required(),
metadata: yup.object().optional(),
});
// Sentiment Data Schema
export const sentimentDataSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
sentiment_score: yup.number().min(-1).max(1).required(),
sentiment_label: yup.string().oneOf(['positive', 'negative', 'neutral']).required(),
confidence: yup.number().min(0).max(1).required(),
text: yup.string().min(1).required(),
source_type: yup.string().oneOf(['reddit', 'twitter', 'news', 'forums']).required(),
source_id: yup.string().required(),
timestamp: yup.date().required(),
processed_at: yup.date().required(),
language: yup.string().default('en'),
keywords: yup.array(yup.string()).required(),
entities: yup.array(yup.object({
name: yup.string().required(),
type: yup.string().required(),
confidence: yup.number().min(0).max(1).required(),
})).required(),
});
// Raw Document Schema
export const rawDocumentSchema = documentBaseSchema.shape({
document_type: yup.string().oneOf(['html', 'pdf', 'text', 'json', 'xml']).required(),
content: yup.string().required(),
content_hash: yup.string().required(),
url: yup.string().url().optional(),
title: yup.string().optional(),
author: yup.string().optional(),
published_date: yup.date().optional(),
extracted_text: yup.string().optional(),
processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(),
size_bytes: yup.number().positive().required(),
language: yup.string().optional(),
});
// News Article Schema
export const newsArticleSchema = documentBaseSchema.shape({
headline: yup.string().min(1).required(),
content: yup.string().min(1).required(),
summary: yup.string().optional(),
author: yup.string().required(),
publication: yup.string().required(),
published_date: yup.date().required(),
url: yup.string().url().required(),
symbols: yup.array(yup.string()).required(),
categories: yup.array(yup.string()).required(),
sentiment_score: yup.number().min(-1).max(1).optional(),
relevance_score: yup.number().min(0).max(1).optional(),
image_url: yup.string().url().optional(),
tags: yup.array(yup.string()).required(),
});
// SEC Filing Schema
export const secFilingSchema = documentBaseSchema.shape({
cik: yup.string().required(),
accession_number: yup.string().required(),
filing_type: yup.string().required(),
company_name: yup.string().required(),
symbols: yup.array(yup.string()).required(),
filing_date: yup.date().required(),
period_end_date: yup.date().required(),
url: yup.string().url().required(),
content: yup.string().required(),
extracted_data: yup.object().optional(),
financial_statements: yup.array(yup.object({
statement_type: yup.string().required(),
data: yup.object().required(),
})).optional(),
processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(),
});
// Earnings Transcript Schema
export const earningsTranscriptSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
company_name: yup.string().required(),
quarter: yup.string().required(),
year: yup.number().min(2000).max(3000).required(),
call_date: yup.date().required(),
transcript: yup.string().required(),
participants: yup.array(yup.object({
name: yup.string().required(),
title: yup.string().required(),
type: yup.string().oneOf(['executive', 'analyst']).required(),
})).required(),
key_topics: yup.array(yup.string()).required(),
sentiment_analysis: yup.object({
overall_sentiment: yup.number().min(-1).max(1).required(),
topic_sentiments: yup.object().required(),
}).optional(),
financial_highlights: yup.object().optional(),
});
// Analyst Report Schema
export const analystReportSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
analyst_firm: yup.string().required(),
analyst_name: yup.string().required(),
report_title: yup.string().required(),
report_date: yup.date().required(),
rating: yup.string().oneOf(['buy', 'hold', 'sell', 'strong_buy', 'strong_sell']).required(),
price_target: yup.number().positive().optional(),
previous_rating: yup.string().optional(),
content: yup.string().required(),
summary: yup.string().required(),
key_points: yup.array(yup.string()).required(),
financial_projections: yup.object().optional(),
});
// Schema mapping for collections
export const schemaMap = {
sentiment_data: sentimentDataSchema,
raw_documents: rawDocumentSchema,
news_articles: newsArticleSchema,
sec_filings: secFilingSchema,
earnings_transcripts: earningsTranscriptSchema,
analyst_reports: analystReportSchema,
} as const;

View file

@ -1,238 +1,238 @@
import { getLogger } from '@stock-bot/logger';
import type { MongoDBClient } from './client';
import type { CollectionNames, DocumentBase } from './types';
import type { WithId, OptionalUnlessRequiredId } from 'mongodb';
/**
* MongoDB Transaction Manager
*
* Provides transaction support for multi-document operations
*/
export class MongoDBTransactionManager {
private readonly client: MongoDBClient;
private readonly logger: ReturnType<typeof getLogger>;
constructor(client: MongoDBClient) {
this.client = client;
this.logger = getLogger('mongodb-transaction-manager');
}
/**
* Execute operations within a transaction
*/
async withTransaction<T>(
operations: (session: any) => Promise<T>,
options?: {
readPreference?: string;
readConcern?: string;
writeConcern?: any;
maxCommitTimeMS?: number;
}
): Promise<T> {
const mongoClient = this.client.mongoClient;
if (!mongoClient) {
throw new Error('MongoDB client not connected');
}
const session = mongoClient.startSession();
try {
this.logger.debug('Starting MongoDB transaction');
const result = await session.withTransaction(
async () => {
return await operations(session);
}, {
readPreference: options?.readPreference as any,
readConcern: { level: options?.readConcern || 'majority' } as any,
writeConcern: options?.writeConcern || { w: 'majority' },
maxCommitTimeMS: options?.maxCommitTimeMS || 10000
}
);
this.logger.debug('MongoDB transaction completed successfully');
return result;
} catch (error) {
this.logger.error('MongoDB transaction failed:', error);
throw error;
} finally {
await session.endSession();
}
}
/**
* Batch insert documents across collections within a transaction
*/
async batchInsert(
operations: Array<{
collection: CollectionNames;
documents: DocumentBase[];
}>,
options?: { ordered?: boolean; bypassDocumentValidation?: boolean }
): Promise<void> {
await this.withTransaction(async (session) => {
for (const operation of operations) {
const collection = this.client.getCollection(operation.collection);
// Add timestamps to all documents
const now = new Date();
const documentsWithTimestamps = operation.documents.map(doc => ({
...doc,
created_at: doc.created_at || now,
updated_at: now
}));
await collection.insertMany(documentsWithTimestamps, {
session,
ordered: options?.ordered ?? true,
bypassDocumentValidation: options?.bypassDocumentValidation ?? false
});
this.logger.debug(`Inserted ${documentsWithTimestamps.length} documents into ${operation.collection}`);
}
});
}
/**
* Batch update documents across collections within a transaction
*/
async batchUpdate(
operations: Array<{
collection: CollectionNames;
filter: any;
update: any;
options?: any;
}>
): Promise<void> {
await this.withTransaction(async (session) => {
const results = [];
for (const operation of operations) {
const collection = this.client.getCollection(operation.collection);
// Add updated timestamp
const updateWithTimestamp = {
...operation.update,
$set: {
...operation.update.$set,
updated_at: new Date()
}
};
const result = await collection.updateMany(
operation.filter,
updateWithTimestamp,
{
session,
...operation.options
}
);
results.push(result);
this.logger.debug(`Updated ${result.modifiedCount} documents in ${operation.collection}`);
}
return results;
});
}
/**
* Move documents between collections within a transaction
*/
async moveDocuments<T extends DocumentBase>(
fromCollection: CollectionNames,
toCollection: CollectionNames,
filter: any,
transform?: (doc: T) => T
): Promise<number> {
return await this.withTransaction(async (session) => {
const sourceCollection = this.client.getCollection<T>(fromCollection);
const targetCollection = this.client.getCollection<T>(toCollection);
// Find documents to move
const documents = await sourceCollection.find(filter, { session }).toArray();
if (documents.length === 0) {
return 0;
} // Transform documents if needed
const documentsToInsert = transform
? documents.map((doc: WithId<T>) => transform(doc as T))
: documents;
// Add updated timestamp
const now = new Date();
documentsToInsert.forEach(doc => {
doc.updated_at = now;
}); // Insert into target collection
await targetCollection.insertMany(documentsToInsert as OptionalUnlessRequiredId<T>[], { session });
// Remove from source collection
const deleteResult = await sourceCollection.deleteMany(filter, { session });
this.logger.info(`Moved ${documents.length} documents from ${fromCollection} to ${toCollection}`);
return deleteResult.deletedCount || 0;
});
}
/**
* Archive old documents within a transaction
*/
async archiveDocuments(
sourceCollection: CollectionNames,
archiveCollection: CollectionNames,
cutoffDate: Date,
batchSize: number = 1000
): Promise<number> {
let totalArchived = 0;
while (true) {
const batchArchived = await this.withTransaction(async (session) => {
const collection = this.client.getCollection(sourceCollection);
const archiveCol = this.client.getCollection(archiveCollection);
// Find old documents
const documents = await collection.find(
{ created_at: { $lt: cutoffDate } },
{ limit: batchSize, session }
).toArray();
if (documents.length === 0) {
return 0;
}
// Add archive metadata
const now = new Date();
const documentsToArchive = documents.map(doc => ({
...doc,
archived_at: now,
archived_from: sourceCollection
}));
// Insert into archive collection
await archiveCol.insertMany(documentsToArchive, { session });
// Remove from source collection
const ids = documents.map(doc => doc._id);
const deleteResult = await collection.deleteMany(
{ _id: { $in: ids } },
{ session }
);
return deleteResult.deletedCount || 0;
});
totalArchived += batchArchived;
if (batchArchived === 0) {
break;
}
this.logger.debug(`Archived batch of ${batchArchived} documents`);
}
this.logger.info(`Archived ${totalArchived} documents from ${sourceCollection} to ${archiveCollection}`);
return totalArchived;
}
}
import { getLogger } from '@stock-bot/logger';
import type { MongoDBClient } from './client';
import type { CollectionNames, DocumentBase } from './types';
import type { WithId, OptionalUnlessRequiredId } from 'mongodb';
/**
* MongoDB Transaction Manager
*
* Provides transaction support for multi-document operations
*/
export class MongoDBTransactionManager {
private readonly client: MongoDBClient;
private readonly logger: ReturnType<typeof getLogger>;
constructor(client: MongoDBClient) {
this.client = client;
this.logger = getLogger('mongodb-transaction-manager');
}
/**
* Execute operations within a transaction
*/
async withTransaction<T>(
operations: (session: any) => Promise<T>,
options?: {
readPreference?: string;
readConcern?: string;
writeConcern?: any;
maxCommitTimeMS?: number;
}
): Promise<T> {
const mongoClient = this.client.mongoClient;
if (!mongoClient) {
throw new Error('MongoDB client not connected');
}
const session = mongoClient.startSession();
try {
this.logger.debug('Starting MongoDB transaction');
const result = await session.withTransaction(
async () => {
return await operations(session);
}, {
readPreference: options?.readPreference as any,
readConcern: { level: options?.readConcern || 'majority' } as any,
writeConcern: options?.writeConcern || { w: 'majority' },
maxCommitTimeMS: options?.maxCommitTimeMS || 10000
}
);
this.logger.debug('MongoDB transaction completed successfully');
return result;
} catch (error) {
this.logger.error('MongoDB transaction failed:', error);
throw error;
} finally {
await session.endSession();
}
}
/**
* Batch insert documents across collections within a transaction
*/
async batchInsert(
operations: Array<{
collection: CollectionNames;
documents: DocumentBase[];
}>,
options?: { ordered?: boolean; bypassDocumentValidation?: boolean }
): Promise<void> {
await this.withTransaction(async (session) => {
for (const operation of operations) {
const collection = this.client.getCollection(operation.collection);
// Add timestamps to all documents
const now = new Date();
const documentsWithTimestamps = operation.documents.map(doc => ({
...doc,
created_at: doc.created_at || now,
updated_at: now
}));
await collection.insertMany(documentsWithTimestamps, {
session,
ordered: options?.ordered ?? true,
bypassDocumentValidation: options?.bypassDocumentValidation ?? false
});
this.logger.debug(`Inserted ${documentsWithTimestamps.length} documents into ${operation.collection}`);
}
});
}
/**
* Batch update documents across collections within a transaction
*/
async batchUpdate(
operations: Array<{
collection: CollectionNames;
filter: any;
update: any;
options?: any;
}>
): Promise<void> {
await this.withTransaction(async (session) => {
const results = [];
for (const operation of operations) {
const collection = this.client.getCollection(operation.collection);
// Add updated timestamp
const updateWithTimestamp = {
...operation.update,
$set: {
...operation.update.$set,
updated_at: new Date()
}
};
const result = await collection.updateMany(
operation.filter,
updateWithTimestamp,
{
session,
...operation.options
}
);
results.push(result);
this.logger.debug(`Updated ${result.modifiedCount} documents in ${operation.collection}`);
}
return results;
});
}
/**
* Move documents between collections within a transaction
*/
async moveDocuments<T extends DocumentBase>(
fromCollection: CollectionNames,
toCollection: CollectionNames,
filter: any,
transform?: (doc: T) => T
): Promise<number> {
return await this.withTransaction(async (session) => {
const sourceCollection = this.client.getCollection<T>(fromCollection);
const targetCollection = this.client.getCollection<T>(toCollection);
// Find documents to move
const documents = await sourceCollection.find(filter, { session }).toArray();
if (documents.length === 0) {
return 0;
} // Transform documents if needed
const documentsToInsert = transform
? documents.map((doc: WithId<T>) => transform(doc as T))
: documents;
// Add updated timestamp
const now = new Date();
documentsToInsert.forEach(doc => {
doc.updated_at = now;
}); // Insert into target collection
await targetCollection.insertMany(documentsToInsert as OptionalUnlessRequiredId<T>[], { session });
// Remove from source collection
const deleteResult = await sourceCollection.deleteMany(filter, { session });
this.logger.info(`Moved ${documents.length} documents from ${fromCollection} to ${toCollection}`);
return deleteResult.deletedCount || 0;
});
}
/**
* Archive old documents within a transaction
*/
async archiveDocuments(
sourceCollection: CollectionNames,
archiveCollection: CollectionNames,
cutoffDate: Date,
batchSize: number = 1000
): Promise<number> {
let totalArchived = 0;
while (true) {
const batchArchived = await this.withTransaction(async (session) => {
const collection = this.client.getCollection(sourceCollection);
const archiveCol = this.client.getCollection(archiveCollection);
// Find old documents
const documents = await collection.find(
{ created_at: { $lt: cutoffDate } },
{ limit: batchSize, session }
).toArray();
if (documents.length === 0) {
return 0;
}
// Add archive metadata
const now = new Date();
const documentsToArchive = documents.map(doc => ({
...doc,
archived_at: now,
archived_from: sourceCollection
}));
// Insert into archive collection
await archiveCol.insertMany(documentsToArchive, { session });
// Remove from source collection
const ids = documents.map(doc => doc._id);
const deleteResult = await collection.deleteMany(
{ _id: { $in: ids } },
{ session }
);
return deleteResult.deletedCount || 0;
});
totalArchived += batchArchived;
if (batchArchived === 0) {
break;
}
this.logger.debug(`Archived batch of ${batchArchived} documents`);
}
this.logger.info(`Archived ${totalArchived} documents from ${sourceCollection} to ${archiveCollection}`);
return totalArchived;
}
}

View file

@ -1,215 +1,215 @@
import * as yup from 'yup';
import type { ObjectId } from 'mongodb';
/**
* MongoDB Client Configuration
*/
export interface MongoDBClientConfig {
host: string;
port: number;
database: string;
username?: string;
password?: string;
authSource?: string;
uri?: string;
poolSettings?: {
maxPoolSize: number;
minPoolSize: number;
maxIdleTime: number;
};
timeouts?: {
connectTimeout: number;
socketTimeout: number;
serverSelectionTimeout: number;
};
tls?: {
enabled: boolean;
insecure: boolean;
caFile?: string;
};
options?: {
retryWrites: boolean;
journal: boolean;
readPreference: 'primary' | 'primaryPreferred' | 'secondary' | 'secondaryPreferred' | 'nearest';
writeConcern: string;
};
}
/**
* MongoDB Connection Options
*/
export interface MongoDBConnectionOptions {
retryAttempts?: number;
retryDelay?: number;
healthCheckInterval?: number;
}
/**
* Health Status Types
*/
export type MongoDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy';
export interface MongoDBHealthCheck {
status: MongoDBHealthStatus;
timestamp: Date;
latency: number;
connections: {
active: number;
available: number;
total: number;
};
errors?: string[];
}
export interface MongoDBMetrics {
operationsPerSecond: number;
averageLatency: number;
errorRate: number;
connectionPoolUtilization: number;
documentsProcessed: number;
}
/**
* Collection Names
*/
export type CollectionNames =
| 'sentiment_data'
| 'raw_documents'
| 'news_articles'
| 'sec_filings'
| 'earnings_transcripts'
| 'analyst_reports'
| 'social_media_posts'
| 'market_events'
| 'economic_indicators';
/**
* Base Document Interface
*/
export interface DocumentBase {
_id?: ObjectId;
created_at: Date;
updated_at: Date;
source: string;
metadata?: Record<string, any>;
}
/**
* Sentiment Data Document
*/
export interface SentimentData extends DocumentBase {
symbol: string;
sentiment_score: number;
sentiment_label: 'positive' | 'negative' | 'neutral';
confidence: number;
text: string;
source_type: 'reddit' | 'twitter' | 'news' | 'forums';
source_id: string;
timestamp: Date;
processed_at: Date;
language: string;
keywords: string[];
entities: Array<{
name: string;
type: string;
confidence: number;
}>;
}
/**
* Raw Document
*/
export interface RawDocument extends DocumentBase {
document_type: 'html' | 'pdf' | 'text' | 'json' | 'xml';
content: string;
content_hash: string;
url?: string;
title?: string;
author?: string;
published_date?: Date;
extracted_text?: string;
processing_status: 'pending' | 'processed' | 'failed';
size_bytes: number;
language?: string;
}
/**
* News Article
*/
export interface NewsArticle extends DocumentBase {
headline: string;
content: string;
summary?: string;
author: string;
publication: string;
published_date: Date;
url: string;
symbols: string[];
categories: string[];
sentiment_score?: number;
relevance_score?: number;
image_url?: string;
tags: string[];
}
/**
* SEC Filing
*/
export interface SecFiling extends DocumentBase {
cik: string;
accession_number: string;
filing_type: string;
company_name: string;
symbols: string[];
filing_date: Date;
period_end_date: Date;
url: string;
content: string;
extracted_data?: Record<string, any>;
financial_statements?: Array<{
statement_type: string;
data: Record<string, number>;
}>;
processing_status: 'pending' | 'processed' | 'failed';
}
/**
* Earnings Transcript
*/
export interface EarningsTranscript extends DocumentBase {
symbol: string;
company_name: string;
quarter: string;
year: number;
call_date: Date;
transcript: string;
participants: Array<{
name: string;
title: string;
type: 'executive' | 'analyst';
}>;
key_topics: string[];
sentiment_analysis?: {
overall_sentiment: number;
topic_sentiments: Record<string, number>;
};
financial_highlights?: Record<string, number>;
}
/**
* Analyst Report
*/
export interface AnalystReport extends DocumentBase {
symbol: string;
analyst_firm: string;
analyst_name: string;
report_title: string;
report_date: Date;
rating: 'buy' | 'hold' | 'sell' | 'strong_buy' | 'strong_sell';
price_target?: number;
previous_rating?: string;
content: string;
summary: string;
key_points: string[];
financial_projections?: Record<string, number>;
}
import * as yup from 'yup';
import type { ObjectId } from 'mongodb';
/**
* MongoDB Client Configuration
*/
export interface MongoDBClientConfig {
host: string;
port: number;
database: string;
username?: string;
password?: string;
authSource?: string;
uri?: string;
poolSettings?: {
maxPoolSize: number;
minPoolSize: number;
maxIdleTime: number;
};
timeouts?: {
connectTimeout: number;
socketTimeout: number;
serverSelectionTimeout: number;
};
tls?: {
enabled: boolean;
insecure: boolean;
caFile?: string;
};
options?: {
retryWrites: boolean;
journal: boolean;
readPreference: 'primary' | 'primaryPreferred' | 'secondary' | 'secondaryPreferred' | 'nearest';
writeConcern: string;
};
}
/**
* MongoDB Connection Options
*/
export interface MongoDBConnectionOptions {
retryAttempts?: number;
retryDelay?: number;
healthCheckInterval?: number;
}
/**
* Health Status Types
*/
export type MongoDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy';
export interface MongoDBHealthCheck {
status: MongoDBHealthStatus;
timestamp: Date;
latency: number;
connections: {
active: number;
available: number;
total: number;
};
errors?: string[];
}
export interface MongoDBMetrics {
operationsPerSecond: number;
averageLatency: number;
errorRate: number;
connectionPoolUtilization: number;
documentsProcessed: number;
}
/**
* Collection Names
*/
export type CollectionNames =
| 'sentiment_data'
| 'raw_documents'
| 'news_articles'
| 'sec_filings'
| 'earnings_transcripts'
| 'analyst_reports'
| 'social_media_posts'
| 'market_events'
| 'economic_indicators';
/**
* Base Document Interface
*/
export interface DocumentBase {
_id?: ObjectId;
created_at: Date;
updated_at: Date;
source: string;
metadata?: Record<string, any>;
}
/**
* Sentiment Data Document
*/
export interface SentimentData extends DocumentBase {
symbol: string;
sentiment_score: number;
sentiment_label: 'positive' | 'negative' | 'neutral';
confidence: number;
text: string;
source_type: 'reddit' | 'twitter' | 'news' | 'forums';
source_id: string;
timestamp: Date;
processed_at: Date;
language: string;
keywords: string[];
entities: Array<{
name: string;
type: string;
confidence: number;
}>;
}
/**
* Raw Document
*/
export interface RawDocument extends DocumentBase {
document_type: 'html' | 'pdf' | 'text' | 'json' | 'xml';
content: string;
content_hash: string;
url?: string;
title?: string;
author?: string;
published_date?: Date;
extracted_text?: string;
processing_status: 'pending' | 'processed' | 'failed';
size_bytes: number;
language?: string;
}
/**
* News Article
*/
export interface NewsArticle extends DocumentBase {
headline: string;
content: string;
summary?: string;
author: string;
publication: string;
published_date: Date;
url: string;
symbols: string[];
categories: string[];
sentiment_score?: number;
relevance_score?: number;
image_url?: string;
tags: string[];
}
/**
* SEC Filing
*/
export interface SecFiling extends DocumentBase {
cik: string;
accession_number: string;
filing_type: string;
company_name: string;
symbols: string[];
filing_date: Date;
period_end_date: Date;
url: string;
content: string;
extracted_data?: Record<string, any>;
financial_statements?: Array<{
statement_type: string;
data: Record<string, number>;
}>;
processing_status: 'pending' | 'processed' | 'failed';
}
/**
* Earnings Transcript
*/
export interface EarningsTranscript extends DocumentBase {
symbol: string;
company_name: string;
quarter: string;
year: number;
call_date: Date;
transcript: string;
participants: Array<{
name: string;
title: string;
type: 'executive' | 'analyst';
}>;
key_topics: string[];
sentiment_analysis?: {
overall_sentiment: number;
topic_sentiments: Record<string, number>;
};
financial_highlights?: Record<string, number>;
}
/**
* Analyst Report
*/
export interface AnalystReport extends DocumentBase {
symbol: string;
analyst_firm: string;
analyst_name: string;
report_title: string;
report_date: Date;
rating: 'buy' | 'hold' | 'sell' | 'strong_buy' | 'strong_sell';
price_target?: number;
previous_rating?: string;
content: string;
summary: string;
key_points: string[];
financial_projections?: Record<string, number>;
}

View file

@ -1,13 +1,13 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../config" },
{ "path": "../logger" }
]
}
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../config" },
{ "path": "../logger" }
]
}

View file

@ -1,10 +1,10 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build", "@stock-bot/logger#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build", "@stock-bot/logger#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}

View file

@ -1,82 +1,82 @@
# PostgreSQL Client Library
A comprehensive PostgreSQL client library for the Stock Bot trading platform, designed for operational data, transactions, and relational queries.
## Features
- **Connection Pooling**: Robust connection pool management
- **Type Safety**: Full TypeScript support with typed queries
- **Transaction Support**: Multi-statement transactions with rollback
- **Schema Management**: Database schema validation and migrations
- **Query Builder**: Fluent query building interface
- **Health Monitoring**: Connection health monitoring and metrics
- **Performance Tracking**: Query performance monitoring and optimization
## Usage
```typescript
import { PostgreSQLClient } from '@stock-bot/postgres-client';
// Initialize client
const pgClient = new PostgreSQLClient();
await pgClient.connect();
// Execute a query
const users = await pgClient.query('SELECT * FROM users WHERE active = $1', [true]);
// Use query builder
const trades = await pgClient
.select('*')
.from('trades')
.where('symbol', '=', 'AAPL')
.orderBy('created_at', 'DESC')
.limit(10)
.execute();
// Execute in transaction
await pgClient.transaction(async (tx) => {
await tx.query('INSERT INTO trades (...) VALUES (...)', []);
await tx.query('UPDATE portfolio SET balance = balance - $1', [amount]);
});
```
## Database Schemas
The client provides typed access to the following schemas:
- **trading**: Core trading operations (trades, orders, positions)
- **strategy**: Strategy definitions and performance
- **risk**: Risk management and compliance
- **audit**: Audit trails and logging
## Configuration
Configure using environment variables:
```env
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_DATABASE=stockbot
POSTGRES_USERNAME=stockbot
POSTGRES_PASSWORD=your_password
```
## Query Builder
The fluent query builder supports:
- SELECT, INSERT, UPDATE, DELETE operations
- Complex WHERE conditions with AND/OR logic
- JOINs (INNER, LEFT, RIGHT, FULL)
- Aggregations (COUNT, SUM, AVG, etc.)
- Subqueries and CTEs
- Window functions
## Health Monitoring
The client includes built-in health monitoring:
```typescript
const health = await pgClient.getHealth();
console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy'
```
# PostgreSQL Client Library
A comprehensive PostgreSQL client library for the Stock Bot trading platform, designed for operational data, transactions, and relational queries.
## Features
- **Connection Pooling**: Robust connection pool management
- **Type Safety**: Full TypeScript support with typed queries
- **Transaction Support**: Multi-statement transactions with rollback
- **Schema Management**: Database schema validation and migrations
- **Query Builder**: Fluent query building interface
- **Health Monitoring**: Connection health monitoring and metrics
- **Performance Tracking**: Query performance monitoring and optimization
## Usage
```typescript
import { PostgreSQLClient } from '@stock-bot/postgres-client';
// Initialize client
const pgClient = new PostgreSQLClient();
await pgClient.connect();
// Execute a query
const users = await pgClient.query('SELECT * FROM users WHERE active = $1', [true]);
// Use query builder
const trades = await pgClient
.select('*')
.from('trades')
.where('symbol', '=', 'AAPL')
.orderBy('created_at', 'DESC')
.limit(10)
.execute();
// Execute in transaction
await pgClient.transaction(async (tx) => {
await tx.query('INSERT INTO trades (...) VALUES (...)', []);
await tx.query('UPDATE portfolio SET balance = balance - $1', [amount]);
});
```
## Database Schemas
The client provides typed access to the following schemas:
- **trading**: Core trading operations (trades, orders, positions)
- **strategy**: Strategy definitions and performance
- **risk**: Risk management and compliance
- **audit**: Audit trails and logging
## Configuration
Configure using environment variables:
```env
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_DATABASE=stockbot
POSTGRES_USERNAME=stockbot
POSTGRES_PASSWORD=your_password
```
## Query Builder
The fluent query builder supports:
- SELECT, INSERT, UPDATE, DELETE operations
- Complex WHERE conditions with AND/OR logic
- JOINs (INNER, LEFT, RIGHT, FULL)
- Aggregations (COUNT, SUM, AVG, etc.)
- Subqueries and CTEs
- Window functions
## Health Monitoring
The client includes built-in health monitoring:
```typescript
const health = await pgClient.getHealth();
console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy'
```

View file

@ -1,47 +1,47 @@
{
"name": "@stock-bot/postgres-client",
"version": "1.0.0",
"description": "PostgreSQL client library for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"clean": "rimraf dist"
},
"dependencies": { "@stock-bot/config": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"pg": "^8.11.3",
"yup": "^1.6.1"
},
"devDependencies": {
"@types/node": "^20.11.0",
"@types/pg": "^8.10.7",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"keywords": [
"postgresql",
"database",
"client",
"stock-bot"
],
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}
{
"name": "@stock-bot/postgres-client",
"version": "1.0.0",
"description": "PostgreSQL client library for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"clean": "rimraf dist"
},
"dependencies": { "@stock-bot/config": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*",
"pg": "^8.11.3",
"yup": "^1.6.1"
},
"devDependencies": {
"@types/node": "^20.11.0",
"@types/pg": "^8.10.7",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"keywords": [
"postgresql",
"database",
"client",
"stock-bot"
],
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}

View file

@ -1,339 +1,339 @@
import { Pool, PoolClient, QueryResult as PgQueryResult, QueryResultRow } from 'pg';
import { postgresConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import type {
PostgreSQLClientConfig,
PostgreSQLConnectionOptions,
QueryResult,
TransactionCallback
} from './types';
import { PostgreSQLHealthMonitor } from './health';
import { PostgreSQLQueryBuilder } from './query-builder';
import { PostgreSQLTransactionManager } from './transactions';
/**
* PostgreSQL Client for Stock Bot
*
* Provides type-safe access to PostgreSQL with connection pooling,
* health monitoring, and transaction support.
*/
export class PostgreSQLClient {
private pool: Pool | null = null;
private readonly config: PostgreSQLClientConfig;
private readonly options: PostgreSQLConnectionOptions;
private readonly logger: ReturnType<typeof getLogger>;
private readonly healthMonitor: PostgreSQLHealthMonitor;
private readonly transactionManager: PostgreSQLTransactionManager;
private isConnected = false;
constructor(
config?: Partial<PostgreSQLClientConfig>,
options?: PostgreSQLConnectionOptions
) {
this.config = this.buildConfig(config);
this.options = {
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options
};
this.logger = getLogger('postgres-client');
this.healthMonitor = new PostgreSQLHealthMonitor(this);
this.transactionManager = new PostgreSQLTransactionManager(this);
}
/**
* Connect to PostgreSQL
*/
async connect(): Promise<void> {
if (this.isConnected && this.pool) {
return;
}
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(`Connecting to PostgreSQL (attempt ${attempt}/${this.options.retryAttempts})...`);
this.pool = new Pool(this.buildPoolConfig());
// Test the connection
const client = await this.pool.connect();
await client.query('SELECT 1');
client.release();
this.isConnected = true;
this.logger.info('Successfully connected to PostgreSQL');
// Start health monitoring
this.healthMonitor.start();
// Setup error handlers
this.setupErrorHandlers();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`PostgreSQL connection attempt ${attempt} failed:`, error);
if (this.pool) {
await this.pool.end();
this.pool = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(`Failed to connect to PostgreSQL after ${this.options.retryAttempts} attempts: ${lastError?.message}`);
}
/**
* Disconnect from PostgreSQL
*/
async disconnect(): Promise<void> {
if (!this.pool) {
return;
}
try {
this.healthMonitor.stop();
await this.pool.end();
this.isConnected = false;
this.pool = null;
this.logger.info('Disconnected from PostgreSQL');
} catch (error) {
this.logger.error('Error disconnecting from PostgreSQL:', error);
throw error;
}
}
/**
* Execute a query
*/
async query<T extends QueryResultRow = any>(text: string, params?: any[]): Promise<QueryResult<T>> {
if (!this.pool) {
throw new Error('PostgreSQL client not connected');
}
const startTime = Date.now();
try {
const result = await this.pool.query<T>(text, params);
const executionTime = Date.now() - startTime;
this.logger.debug(`Query executed in ${executionTime}ms`, {
query: text.substring(0, 100),
params: params?.length
});
return {
...result,
executionTime
} as QueryResult<T>;
} catch (error) {
const executionTime = Date.now() - startTime;
this.logger.error(`Query failed after ${executionTime}ms:`, {
error,
query: text,
params
});
throw error;
}
}
/**
* Execute multiple queries in a transaction
*/
async transaction<T>(callback: TransactionCallback<T>): Promise<T> {
return await this.transactionManager.execute(callback);
}
/**
* Get a query builder instance
*/
queryBuilder(): PostgreSQLQueryBuilder {
return new PostgreSQLQueryBuilder(this);
}
/**
* Create a new query builder with SELECT
*/
select(columns: string | string[] = '*'): PostgreSQLQueryBuilder {
return this.queryBuilder().select(columns);
}
/**
* Create a new query builder with INSERT
*/
insert(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().insert(table);
}
/**
* Create a new query builder with UPDATE
*/
update(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().update(table);
}
/**
* Create a new query builder with DELETE
*/
delete(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().delete(table);
}
/**
* Execute a stored procedure or function
*/
async callFunction<T extends QueryResultRow = any>(functionName: string, params?: any[]): Promise<QueryResult<T>> {
const placeholders = params ? params.map((_, i) => `$${i + 1}`).join(', ') : '';
const query = `SELECT * FROM ${functionName}(${placeholders})`;
return await this.query<T>(query, params);
}
/**
* Check if a table exists
*/
async tableExists(tableName: string, schemaName: string = 'public'): Promise<boolean> {
const result = await this.query(
`SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = $1 AND table_name = $2
)`,
[schemaName, tableName]
);
return result.rows[0].exists;
}
/**
* Get table schema information
*/
async getTableSchema(tableName: string, schemaName: string = 'public'): Promise<any[]> {
const result = await this.query(
`SELECT
column_name,
data_type,
is_nullable,
column_default,
character_maximum_length
FROM information_schema.columns
WHERE table_schema = $1 AND table_name = $2
ORDER BY ordinal_position`,
[schemaName, tableName]
);
return result.rows;
}
/**
* Execute EXPLAIN for query analysis
*/
async explain(query: string, params?: any[]): Promise<any[]> {
const explainQuery = `EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON) ${query}`;
const result = await this.query(explainQuery, params);
return result.rows[0]['QUERY PLAN'];
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
const result = await this.query(`
SELECT
(SELECT count(*) FROM pg_stat_activity WHERE state = 'active') as active_connections,
(SELECT count(*) FROM pg_stat_activity WHERE state = 'idle') as idle_connections,
(SELECT setting FROM pg_settings WHERE name = 'max_connections') as max_connections,
pg_size_pretty(pg_database_size(current_database())) as database_size
`);
return result.rows[0];
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.pool;
}
/**
* Get the underlying connection pool
*/
get connectionPool(): Pool | null {
return this.pool;
}
private buildConfig(config?: Partial<PostgreSQLClientConfig>): PostgreSQLClientConfig {
return {
host: config?.host || postgresConfig.POSTGRES_HOST,
port: config?.port || postgresConfig.POSTGRES_PORT,
database: config?.database || postgresConfig.POSTGRES_DATABASE,
username: config?.username || postgresConfig.POSTGRES_USERNAME,
password: config?.password || postgresConfig.POSTGRES_PASSWORD,
poolSettings: {
min: postgresConfig.POSTGRES_POOL_MIN,
max: postgresConfig.POSTGRES_POOL_MAX,
idleTimeoutMillis: postgresConfig.POSTGRES_POOL_IDLE_TIMEOUT,
...config?.poolSettings
},
ssl: {
enabled: postgresConfig.POSTGRES_SSL,
rejectUnauthorized: postgresConfig.POSTGRES_SSL_REJECT_UNAUTHORIZED,
...config?.ssl
},
timeouts: {
query: postgresConfig.POSTGRES_QUERY_TIMEOUT,
connection: postgresConfig.POSTGRES_CONNECTION_TIMEOUT,
statement: postgresConfig.POSTGRES_STATEMENT_TIMEOUT,
lock: postgresConfig.POSTGRES_LOCK_TIMEOUT,
idleInTransaction: postgresConfig.POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
...config?.timeouts
}
};
}
private buildPoolConfig(): any {
return {
host: this.config.host,
port: this.config.port,
database: this.config.database,
user: this.config.username,
password: this.config.password,
min: this.config.poolSettings?.min,
max: this.config.poolSettings?.max,
idleTimeoutMillis: this.config.poolSettings?.idleTimeoutMillis,
connectionTimeoutMillis: this.config.timeouts?.connection,
query_timeout: this.config.timeouts?.query,
statement_timeout: this.config.timeouts?.statement,
lock_timeout: this.config.timeouts?.lock,
idle_in_transaction_session_timeout: this.config.timeouts?.idleInTransaction,
ssl: this.config.ssl?.enabled ? {
rejectUnauthorized: this.config.ssl.rejectUnauthorized
} : false
};
}
private setupErrorHandlers(): void {
if (!this.pool) return;
this.pool.on('error', (error) => {
this.logger.error('PostgreSQL pool error:', error);
});
this.pool.on('connect', () => {
this.logger.debug('New PostgreSQL client connected');
});
this.pool.on('remove', () => {
this.logger.debug('PostgreSQL client removed from pool');
});
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}
import { Pool, PoolClient, QueryResult as PgQueryResult, QueryResultRow } from 'pg';
import { postgresConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import type {
PostgreSQLClientConfig,
PostgreSQLConnectionOptions,
QueryResult,
TransactionCallback
} from './types';
import { PostgreSQLHealthMonitor } from './health';
import { PostgreSQLQueryBuilder } from './query-builder';
import { PostgreSQLTransactionManager } from './transactions';
/**
* PostgreSQL Client for Stock Bot
*
* Provides type-safe access to PostgreSQL with connection pooling,
* health monitoring, and transaction support.
*/
export class PostgreSQLClient {
private pool: Pool | null = null;
private readonly config: PostgreSQLClientConfig;
private readonly options: PostgreSQLConnectionOptions;
private readonly logger: ReturnType<typeof getLogger>;
private readonly healthMonitor: PostgreSQLHealthMonitor;
private readonly transactionManager: PostgreSQLTransactionManager;
private isConnected = false;
constructor(
config?: Partial<PostgreSQLClientConfig>,
options?: PostgreSQLConnectionOptions
) {
this.config = this.buildConfig(config);
this.options = {
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options
};
this.logger = getLogger('postgres-client');
this.healthMonitor = new PostgreSQLHealthMonitor(this);
this.transactionManager = new PostgreSQLTransactionManager(this);
}
/**
* Connect to PostgreSQL
*/
async connect(): Promise<void> {
if (this.isConnected && this.pool) {
return;
}
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(`Connecting to PostgreSQL (attempt ${attempt}/${this.options.retryAttempts})...`);
this.pool = new Pool(this.buildPoolConfig());
// Test the connection
const client = await this.pool.connect();
await client.query('SELECT 1');
client.release();
this.isConnected = true;
this.logger.info('Successfully connected to PostgreSQL');
// Start health monitoring
this.healthMonitor.start();
// Setup error handlers
this.setupErrorHandlers();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`PostgreSQL connection attempt ${attempt} failed:`, error);
if (this.pool) {
await this.pool.end();
this.pool = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(`Failed to connect to PostgreSQL after ${this.options.retryAttempts} attempts: ${lastError?.message}`);
}
/**
* Disconnect from PostgreSQL
*/
async disconnect(): Promise<void> {
if (!this.pool) {
return;
}
try {
this.healthMonitor.stop();
await this.pool.end();
this.isConnected = false;
this.pool = null;
this.logger.info('Disconnected from PostgreSQL');
} catch (error) {
this.logger.error('Error disconnecting from PostgreSQL:', error);
throw error;
}
}
/**
* Execute a query
*/
async query<T extends QueryResultRow = any>(text: string, params?: any[]): Promise<QueryResult<T>> {
if (!this.pool) {
throw new Error('PostgreSQL client not connected');
}
const startTime = Date.now();
try {
const result = await this.pool.query<T>(text, params);
const executionTime = Date.now() - startTime;
this.logger.debug(`Query executed in ${executionTime}ms`, {
query: text.substring(0, 100),
params: params?.length
});
return {
...result,
executionTime
} as QueryResult<T>;
} catch (error) {
const executionTime = Date.now() - startTime;
this.logger.error(`Query failed after ${executionTime}ms:`, {
error,
query: text,
params
});
throw error;
}
}
/**
* Execute multiple queries in a transaction
*/
async transaction<T>(callback: TransactionCallback<T>): Promise<T> {
return await this.transactionManager.execute(callback);
}
/**
* Get a query builder instance
*/
queryBuilder(): PostgreSQLQueryBuilder {
return new PostgreSQLQueryBuilder(this);
}
/**
* Create a new query builder with SELECT
*/
select(columns: string | string[] = '*'): PostgreSQLQueryBuilder {
return this.queryBuilder().select(columns);
}
/**
* Create a new query builder with INSERT
*/
insert(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().insert(table);
}
/**
* Create a new query builder with UPDATE
*/
update(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().update(table);
}
/**
* Create a new query builder with DELETE
*/
delete(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().delete(table);
}
/**
* Execute a stored procedure or function
*/
async callFunction<T extends QueryResultRow = any>(functionName: string, params?: any[]): Promise<QueryResult<T>> {
const placeholders = params ? params.map((_, i) => `$${i + 1}`).join(', ') : '';
const query = `SELECT * FROM ${functionName}(${placeholders})`;
return await this.query<T>(query, params);
}
/**
* Check if a table exists
*/
async tableExists(tableName: string, schemaName: string = 'public'): Promise<boolean> {
const result = await this.query(
`SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = $1 AND table_name = $2
)`,
[schemaName, tableName]
);
return result.rows[0].exists;
}
/**
* Get table schema information
*/
async getTableSchema(tableName: string, schemaName: string = 'public'): Promise<any[]> {
const result = await this.query(
`SELECT
column_name,
data_type,
is_nullable,
column_default,
character_maximum_length
FROM information_schema.columns
WHERE table_schema = $1 AND table_name = $2
ORDER BY ordinal_position`,
[schemaName, tableName]
);
return result.rows;
}
/**
* Execute EXPLAIN for query analysis
*/
async explain(query: string, params?: any[]): Promise<any[]> {
const explainQuery = `EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON) ${query}`;
const result = await this.query(explainQuery, params);
return result.rows[0]['QUERY PLAN'];
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
const result = await this.query(`
SELECT
(SELECT count(*) FROM pg_stat_activity WHERE state = 'active') as active_connections,
(SELECT count(*) FROM pg_stat_activity WHERE state = 'idle') as idle_connections,
(SELECT setting FROM pg_settings WHERE name = 'max_connections') as max_connections,
pg_size_pretty(pg_database_size(current_database())) as database_size
`);
return result.rows[0];
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.pool;
}
/**
* Get the underlying connection pool
*/
get connectionPool(): Pool | null {
return this.pool;
}
private buildConfig(config?: Partial<PostgreSQLClientConfig>): PostgreSQLClientConfig {
return {
host: config?.host || postgresConfig.POSTGRES_HOST,
port: config?.port || postgresConfig.POSTGRES_PORT,
database: config?.database || postgresConfig.POSTGRES_DATABASE,
username: config?.username || postgresConfig.POSTGRES_USERNAME,
password: config?.password || postgresConfig.POSTGRES_PASSWORD,
poolSettings: {
min: postgresConfig.POSTGRES_POOL_MIN,
max: postgresConfig.POSTGRES_POOL_MAX,
idleTimeoutMillis: postgresConfig.POSTGRES_POOL_IDLE_TIMEOUT,
...config?.poolSettings
},
ssl: {
enabled: postgresConfig.POSTGRES_SSL,
rejectUnauthorized: postgresConfig.POSTGRES_SSL_REJECT_UNAUTHORIZED,
...config?.ssl
},
timeouts: {
query: postgresConfig.POSTGRES_QUERY_TIMEOUT,
connection: postgresConfig.POSTGRES_CONNECTION_TIMEOUT,
statement: postgresConfig.POSTGRES_STATEMENT_TIMEOUT,
lock: postgresConfig.POSTGRES_LOCK_TIMEOUT,
idleInTransaction: postgresConfig.POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
...config?.timeouts
}
};
}
private buildPoolConfig(): any {
return {
host: this.config.host,
port: this.config.port,
database: this.config.database,
user: this.config.username,
password: this.config.password,
min: this.config.poolSettings?.min,
max: this.config.poolSettings?.max,
idleTimeoutMillis: this.config.poolSettings?.idleTimeoutMillis,
connectionTimeoutMillis: this.config.timeouts?.connection,
query_timeout: this.config.timeouts?.query,
statement_timeout: this.config.timeouts?.statement,
lock_timeout: this.config.timeouts?.lock,
idle_in_transaction_session_timeout: this.config.timeouts?.idleInTransaction,
ssl: this.config.ssl?.enabled ? {
rejectUnauthorized: this.config.ssl.rejectUnauthorized
} : false
};
}
private setupErrorHandlers(): void {
if (!this.pool) return;
this.pool.on('error', (error) => {
this.logger.error('PostgreSQL pool error:', error);
});
this.pool.on('connect', () => {
this.logger.debug('New PostgreSQL client connected');
});
this.pool.on('remove', () => {
this.logger.debug('PostgreSQL client removed from pool');
});
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}

View file

@ -1,64 +1,64 @@
import { PostgreSQLClient } from './client';
import { postgresConfig } from '@stock-bot/config';
import type { PostgreSQLClientConfig, PostgreSQLConnectionOptions } from './types';
/**
* Factory function to create a PostgreSQL client instance
*/
export function createPostgreSQLClient(
config?: Partial<PostgreSQLClientConfig>,
options?: PostgreSQLConnectionOptions
): PostgreSQLClient {
return new PostgreSQLClient(config, options);
}
/**
* Create a PostgreSQL client with default configuration
*/
export function createDefaultPostgreSQLClient(): PostgreSQLClient {
const config: Partial<PostgreSQLClientConfig> = {
host: postgresConfig.POSTGRES_HOST,
port: postgresConfig.POSTGRES_PORT,
database: postgresConfig.POSTGRES_DATABASE,
username: postgresConfig.POSTGRES_USERNAME,
password: postgresConfig.POSTGRES_PASSWORD
};
return new PostgreSQLClient(config);
}
/**
* Singleton PostgreSQL client instance
*/
let defaultClient: PostgreSQLClient | null = null;
/**
* Get or create the default PostgreSQL client instance
*/
export function getPostgreSQLClient(): PostgreSQLClient {
if (!defaultClient) {
defaultClient = createDefaultPostgreSQLClient();
}
return defaultClient;
}
/**
* Connect to PostgreSQL using the default client
*/
export async function connectPostgreSQL(): Promise<PostgreSQLClient> {
const client = getPostgreSQLClient();
if (!client.connected) {
await client.connect();
}
return client;
}
/**
* Disconnect from PostgreSQL
*/
export async function disconnectPostgreSQL(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}
import { PostgreSQLClient } from './client';
import { postgresConfig } from '@stock-bot/config';
import type { PostgreSQLClientConfig, PostgreSQLConnectionOptions } from './types';
/**
* Factory function to create a PostgreSQL client instance
*/
export function createPostgreSQLClient(
config?: Partial<PostgreSQLClientConfig>,
options?: PostgreSQLConnectionOptions
): PostgreSQLClient {
return new PostgreSQLClient(config, options);
}
/**
* Create a PostgreSQL client with default configuration
*/
export function createDefaultPostgreSQLClient(): PostgreSQLClient {
const config: Partial<PostgreSQLClientConfig> = {
host: postgresConfig.POSTGRES_HOST,
port: postgresConfig.POSTGRES_PORT,
database: postgresConfig.POSTGRES_DATABASE,
username: postgresConfig.POSTGRES_USERNAME,
password: postgresConfig.POSTGRES_PASSWORD
};
return new PostgreSQLClient(config);
}
/**
* Singleton PostgreSQL client instance
*/
let defaultClient: PostgreSQLClient | null = null;
/**
* Get or create the default PostgreSQL client instance
*/
export function getPostgreSQLClient(): PostgreSQLClient {
if (!defaultClient) {
defaultClient = createDefaultPostgreSQLClient();
}
return defaultClient;
}
/**
* Connect to PostgreSQL using the default client
*/
export async function connectPostgreSQL(): Promise<PostgreSQLClient> {
const client = getPostgreSQLClient();
if (!client.connected) {
await client.connect();
}
return client;
}
/**
* Disconnect from PostgreSQL
*/
export async function disconnectPostgreSQL(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}

View file

@ -1,142 +1,142 @@
import { getLogger } from '@stock-bot/logger';
import type { PostgreSQLClient } from './client';
import type { PostgreSQLHealthCheck, PostgreSQLHealthStatus, PostgreSQLMetrics } from './types';
/**
* PostgreSQL Health Monitor
*
* Monitors PostgreSQL connection health and provides metrics
*/
export class PostgreSQLHealthMonitor {
private readonly client: PostgreSQLClient;
private readonly logger: ReturnType<typeof getLogger>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private metrics: PostgreSQLMetrics;
private lastHealthCheck: PostgreSQLHealthCheck | null = null;
constructor(client: PostgreSQLClient) {
this.client = client;
this.logger = getLogger('postgres-health-monitor');
this.metrics = {
queriesPerSecond: 0,
averageQueryTime: 0,
errorRate: 0,
connectionPoolUtilization: 0,
slowQueries: 0
};
}
/**
* Start health monitoring
*/
start(intervalMs: number = 30000): void {
if (this.healthCheckInterval) {
this.stop();
}
this.logger.info(`Starting PostgreSQL health monitoring (interval: ${intervalMs}ms)`);
this.healthCheckInterval = setInterval(async () => {
try {
await this.performHealthCheck();
} catch (error) {
this.logger.error('Health check failed:', error);
}
}, intervalMs);
// Perform initial health check
this.performHealthCheck().catch(error => {
this.logger.error('Initial health check failed:', error);
});
}
/**
* Stop health monitoring
*/
stop(): void {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
this.logger.info('Stopped PostgreSQL health monitoring');
}
}
/**
* Get current health status
*/
async getHealth(): Promise<PostgreSQLHealthCheck> {
if (!this.lastHealthCheck) {
await this.performHealthCheck();
}
return this.lastHealthCheck!;
}
/**
* Get current metrics
*/
getMetrics(): PostgreSQLMetrics {
return { ...this.metrics };
}
/**
* Perform a health check
*/
private async performHealthCheck(): Promise<void> {
const startTime = Date.now();
const errors: string[] = [];
let status: PostgreSQLHealthStatus = 'healthy';
try {
if (!this.client.connected) {
errors.push('PostgreSQL client not connected');
status = 'unhealthy';
} else {
// Test basic connectivity
await this.client.query('SELECT 1');
// Get connection stats
const stats = await this.client.getStats();
// Check connection pool utilization
const utilization = parseInt(stats.active_connections) / parseInt(stats.max_connections);
if (utilization > 0.8) {
errors.push('High connection pool utilization');
status = status === 'healthy' ? 'degraded' : status;
}
// Check for high latency
const latency = Date.now() - startTime;
if (latency > 1000) {
errors.push(`High latency: ${latency}ms`);
status = status === 'healthy' ? 'degraded' : status;
}
this.metrics.connectionPoolUtilization = utilization;
}
} catch (error) {
errors.push(`Health check failed: ${(error as Error).message}`);
status = 'unhealthy';
}
const latency = Date.now() - startTime;
this.lastHealthCheck = {
status,
timestamp: new Date(),
latency,
connections: {
active: 1,
idle: 9,
total: 10
},
errors: errors.length > 0 ? errors : undefined
};
// Log health status changes
if (status !== 'healthy') {
this.logger.warn(`PostgreSQL health status: ${status}`, { errors, latency });
} else {
this.logger.debug(`PostgreSQL health check passed (${latency}ms)`);
}
}
}
import { getLogger } from '@stock-bot/logger';
import type { PostgreSQLClient } from './client';
import type { PostgreSQLHealthCheck, PostgreSQLHealthStatus, PostgreSQLMetrics } from './types';
/**
* PostgreSQL Health Monitor
*
* Monitors PostgreSQL connection health and provides metrics
*/
export class PostgreSQLHealthMonitor {
private readonly client: PostgreSQLClient;
private readonly logger: ReturnType<typeof getLogger>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private metrics: PostgreSQLMetrics;
private lastHealthCheck: PostgreSQLHealthCheck | null = null;
constructor(client: PostgreSQLClient) {
this.client = client;
this.logger = getLogger('postgres-health-monitor');
this.metrics = {
queriesPerSecond: 0,
averageQueryTime: 0,
errorRate: 0,
connectionPoolUtilization: 0,
slowQueries: 0
};
}
/**
* Start health monitoring
*/
start(intervalMs: number = 30000): void {
if (this.healthCheckInterval) {
this.stop();
}
this.logger.info(`Starting PostgreSQL health monitoring (interval: ${intervalMs}ms)`);
this.healthCheckInterval = setInterval(async () => {
try {
await this.performHealthCheck();
} catch (error) {
this.logger.error('Health check failed:', error);
}
}, intervalMs);
// Perform initial health check
this.performHealthCheck().catch(error => {
this.logger.error('Initial health check failed:', error);
});
}
/**
* Stop health monitoring
*/
stop(): void {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
this.logger.info('Stopped PostgreSQL health monitoring');
}
}
/**
* Get current health status
*/
async getHealth(): Promise<PostgreSQLHealthCheck> {
if (!this.lastHealthCheck) {
await this.performHealthCheck();
}
return this.lastHealthCheck!;
}
/**
* Get current metrics
*/
getMetrics(): PostgreSQLMetrics {
return { ...this.metrics };
}
/**
* Perform a health check
*/
private async performHealthCheck(): Promise<void> {
const startTime = Date.now();
const errors: string[] = [];
let status: PostgreSQLHealthStatus = 'healthy';
try {
if (!this.client.connected) {
errors.push('PostgreSQL client not connected');
status = 'unhealthy';
} else {
// Test basic connectivity
await this.client.query('SELECT 1');
// Get connection stats
const stats = await this.client.getStats();
// Check connection pool utilization
const utilization = parseInt(stats.active_connections) / parseInt(stats.max_connections);
if (utilization > 0.8) {
errors.push('High connection pool utilization');
status = status === 'healthy' ? 'degraded' : status;
}
// Check for high latency
const latency = Date.now() - startTime;
if (latency > 1000) {
errors.push(`High latency: ${latency}ms`);
status = status === 'healthy' ? 'degraded' : status;
}
this.metrics.connectionPoolUtilization = utilization;
}
} catch (error) {
errors.push(`Health check failed: ${(error as Error).message}`);
status = 'unhealthy';
}
const latency = Date.now() - startTime;
this.lastHealthCheck = {
status,
timestamp: new Date(),
latency,
connections: {
active: 1,
idle: 9,
total: 10
},
errors: errors.length > 0 ? errors : undefined
};
// Log health status changes
if (status !== 'healthy') {
this.logger.warn(`PostgreSQL health status: ${status}`, { errors, latency });
} else {
this.logger.debug(`PostgreSQL health check passed (${latency}ms)`);
}
}
}

View file

@ -1,34 +1,34 @@
/**
* PostgreSQL Client Library for Stock Bot
*
* Provides type-safe PostgreSQL access for operational data,
* transactions, and relational queries.
*/
export { PostgreSQLClient } from './client';
export { PostgreSQLHealthMonitor } from './health';
export { PostgreSQLTransactionManager } from './transactions';
export { PostgreSQLQueryBuilder } from './query-builder';
// export { PostgreSQLMigrationManager } from './migrations'; // TODO: Implement migrations
// Types
export type {
PostgreSQLClientConfig,
PostgreSQLConnectionOptions,
PostgreSQLHealthStatus,
PostgreSQLMetrics,
QueryResult,
TransactionCallback,
SchemaNames,
TableNames,
Trade,
Order,
Position,
Portfolio,
Strategy,
RiskLimit,
AuditLog
} from './types';
// Utils
export { createPostgreSQLClient, getPostgreSQLClient } from './factory';
/**
* PostgreSQL Client Library for Stock Bot
*
* Provides type-safe PostgreSQL access for operational data,
* transactions, and relational queries.
*/
export { PostgreSQLClient } from './client';
export { PostgreSQLHealthMonitor } from './health';
export { PostgreSQLTransactionManager } from './transactions';
export { PostgreSQLQueryBuilder } from './query-builder';
// export { PostgreSQLMigrationManager } from './migrations'; // TODO: Implement migrations
// Types
export type {
PostgreSQLClientConfig,
PostgreSQLConnectionOptions,
PostgreSQLHealthStatus,
PostgreSQLMetrics,
QueryResult,
TransactionCallback,
SchemaNames,
TableNames,
Trade,
Order,
Position,
Portfolio,
Strategy,
RiskLimit,
AuditLog
} from './types';
// Utils
export { createPostgreSQLClient, getPostgreSQLClient } from './factory';

View file

@ -1,268 +1,268 @@
import type { QueryResultRow } from 'pg';
import type { PostgreSQLClient } from './client';
import type { WhereCondition, JoinCondition, OrderByCondition, QueryResult } from './types';
/**
* PostgreSQL Query Builder
*
* Provides a fluent interface for building SQL queries
*/
export class PostgreSQLQueryBuilder {
private queryType: 'SELECT' | 'INSERT' | 'UPDATE' | 'DELETE' | null = null;
private selectColumns: string[] = [];
private fromTable: string = '';
private joins: JoinCondition[] = [];
private whereConditions: WhereCondition[] = [];
private groupByColumns: string[] = [];
private havingConditions: WhereCondition[] = [];
private orderByConditions: OrderByCondition[] = [];
private limitCount: number | null = null;
private offsetCount: number | null = null;
private insertValues: Record<string, any> = {};
private updateValues: Record<string, any> = {};
private readonly client: PostgreSQLClient;
constructor(client: PostgreSQLClient) {
this.client = client;
}
/**
* SELECT statement
*/
select(columns: string | string[] = '*'): this {
this.queryType = 'SELECT';
this.selectColumns = Array.isArray(columns) ? columns : [columns];
return this;
}
/**
* FROM clause
*/
from(table: string): this {
this.fromTable = table;
return this;
}
/**
* JOIN clause
*/
join(table: string, on: string, type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL' = 'INNER'): this {
this.joins.push({ type, table, on });
return this;
}
/**
* WHERE clause
*/
where(column: string, operator: string, value?: any): this {
this.whereConditions.push({ column, operator: operator as any, value });
return this;
}
/**
* GROUP BY clause
*/
groupBy(columns: string | string[]): this {
this.groupByColumns = Array.isArray(columns) ? columns : [columns];
return this;
}
/**
* ORDER BY clause
*/
orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): this {
this.orderByConditions.push({ column, direction });
return this;
}
/**
* LIMIT clause
*/
limit(count: number): this {
this.limitCount = count;
return this;
}
/**
* OFFSET clause
*/
offset(count: number): this {
this.offsetCount = count;
return this;
}
/**
* INSERT statement
*/
insert(table: string): this {
this.queryType = 'INSERT';
this.fromTable = table;
return this;
}
/**
* VALUES for INSERT
*/
values(data: Record<string, any>): this {
this.insertValues = data;
return this;
}
/**
* UPDATE statement
*/
update(table: string): this {
this.queryType = 'UPDATE';
this.fromTable = table;
return this;
}
/**
* SET for UPDATE
*/
set(data: Record<string, any>): this {
this.updateValues = data;
return this;
}
/**
* DELETE statement
*/
delete(table: string): this {
this.queryType = 'DELETE';
this.fromTable = table;
return this;
}
/**
* Build and execute the query
*/
async execute<T extends QueryResultRow = any>(): Promise<QueryResult<T>> {
const { sql, params } = this.build();
return await this.client.query<T>(sql, params);
}
/**
* Build the SQL query
*/
build(): { sql: string; params: any[] } {
const params: any[] = [];
let sql = '';
switch (this.queryType) {
case 'SELECT':
sql = this.buildSelectQuery(params);
break;
case 'INSERT':
sql = this.buildInsertQuery(params);
break;
case 'UPDATE':
sql = this.buildUpdateQuery(params);
break;
case 'DELETE':
sql = this.buildDeleteQuery(params);
break;
default:
throw new Error('Query type not specified');
}
return { sql, params };
}
private buildSelectQuery(params: any[]): string {
let sql = `SELECT ${this.selectColumns.join(', ')}`;
if (this.fromTable) {
sql += ` FROM ${this.fromTable}`;
}
// Add JOINs
for (const join of this.joins) {
sql += ` ${join.type} JOIN ${join.table} ON ${join.on}`;
}
// Add WHERE
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
// Add GROUP BY
if (this.groupByColumns.length > 0) {
sql += ` GROUP BY ${this.groupByColumns.join(', ')}`;
}
// Add HAVING
if (this.havingConditions.length > 0) {
sql += ' HAVING ' + this.buildWhereClause(this.havingConditions, params);
}
// Add ORDER BY
if (this.orderByConditions.length > 0) {
const orderBy = this.orderByConditions
.map(order => `${order.column} ${order.direction}`)
.join(', ');
sql += ` ORDER BY ${orderBy}`;
}
// Add LIMIT
if (this.limitCount !== null) {
sql += ` LIMIT $${params.length + 1}`;
params.push(this.limitCount);
}
// Add OFFSET
if (this.offsetCount !== null) {
sql += ` OFFSET $${params.length + 1}`;
params.push(this.offsetCount);
}
return sql;
}
private buildInsertQuery(params: any[]): string {
const columns = Object.keys(this.insertValues);
const placeholders = columns.map((_, i) => `$${params.length + i + 1}`);
params.push(...Object.values(this.insertValues));
return `INSERT INTO ${this.fromTable} (${columns.join(', ')}) VALUES (${placeholders.join(', ')})`;
}
private buildUpdateQuery(params: any[]): string {
const sets = Object.keys(this.updateValues).map((key, i) => {
return `${key} = $${params.length + i + 1}`;
});
params.push(...Object.values(this.updateValues));
let sql = `UPDATE ${this.fromTable} SET ${sets.join(', ')}`;
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
return sql;
}
private buildDeleteQuery(params: any[]): string {
let sql = `DELETE FROM ${this.fromTable}`;
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
return sql;
}
private buildWhereClause(conditions: WhereCondition[], params: any[]): string {
return conditions.map(condition => {
if (condition.operator === 'IS NULL' || condition.operator === 'IS NOT NULL') {
return `${condition.column} ${condition.operator}`;
} else {
params.push(condition.value);
return `${condition.column} ${condition.operator} $${params.length}`;
}
}).join(' AND ');
}
}
import type { QueryResultRow } from 'pg';
import type { PostgreSQLClient } from './client';
import type { WhereCondition, JoinCondition, OrderByCondition, QueryResult } from './types';
/**
* PostgreSQL Query Builder
*
* Provides a fluent interface for building SQL queries
*/
export class PostgreSQLQueryBuilder {
private queryType: 'SELECT' | 'INSERT' | 'UPDATE' | 'DELETE' | null = null;
private selectColumns: string[] = [];
private fromTable: string = '';
private joins: JoinCondition[] = [];
private whereConditions: WhereCondition[] = [];
private groupByColumns: string[] = [];
private havingConditions: WhereCondition[] = [];
private orderByConditions: OrderByCondition[] = [];
private limitCount: number | null = null;
private offsetCount: number | null = null;
private insertValues: Record<string, any> = {};
private updateValues: Record<string, any> = {};
private readonly client: PostgreSQLClient;
constructor(client: PostgreSQLClient) {
this.client = client;
}
/**
* SELECT statement
*/
select(columns: string | string[] = '*'): this {
this.queryType = 'SELECT';
this.selectColumns = Array.isArray(columns) ? columns : [columns];
return this;
}
/**
* FROM clause
*/
from(table: string): this {
this.fromTable = table;
return this;
}
/**
* JOIN clause
*/
join(table: string, on: string, type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL' = 'INNER'): this {
this.joins.push({ type, table, on });
return this;
}
/**
* WHERE clause
*/
where(column: string, operator: string, value?: any): this {
this.whereConditions.push({ column, operator: operator as any, value });
return this;
}
/**
* GROUP BY clause
*/
groupBy(columns: string | string[]): this {
this.groupByColumns = Array.isArray(columns) ? columns : [columns];
return this;
}
/**
* ORDER BY clause
*/
orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): this {
this.orderByConditions.push({ column, direction });
return this;
}
/**
* LIMIT clause
*/
limit(count: number): this {
this.limitCount = count;
return this;
}
/**
* OFFSET clause
*/
offset(count: number): this {
this.offsetCount = count;
return this;
}
/**
* INSERT statement
*/
insert(table: string): this {
this.queryType = 'INSERT';
this.fromTable = table;
return this;
}
/**
* VALUES for INSERT
*/
values(data: Record<string, any>): this {
this.insertValues = data;
return this;
}
/**
* UPDATE statement
*/
update(table: string): this {
this.queryType = 'UPDATE';
this.fromTable = table;
return this;
}
/**
* SET for UPDATE
*/
set(data: Record<string, any>): this {
this.updateValues = data;
return this;
}
/**
* DELETE statement
*/
delete(table: string): this {
this.queryType = 'DELETE';
this.fromTable = table;
return this;
}
/**
* Build and execute the query
*/
async execute<T extends QueryResultRow = any>(): Promise<QueryResult<T>> {
const { sql, params } = this.build();
return await this.client.query<T>(sql, params);
}
/**
* Build the SQL query
*/
build(): { sql: string; params: any[] } {
const params: any[] = [];
let sql = '';
switch (this.queryType) {
case 'SELECT':
sql = this.buildSelectQuery(params);
break;
case 'INSERT':
sql = this.buildInsertQuery(params);
break;
case 'UPDATE':
sql = this.buildUpdateQuery(params);
break;
case 'DELETE':
sql = this.buildDeleteQuery(params);
break;
default:
throw new Error('Query type not specified');
}
return { sql, params };
}
private buildSelectQuery(params: any[]): string {
let sql = `SELECT ${this.selectColumns.join(', ')}`;
if (this.fromTable) {
sql += ` FROM ${this.fromTable}`;
}
// Add JOINs
for (const join of this.joins) {
sql += ` ${join.type} JOIN ${join.table} ON ${join.on}`;
}
// Add WHERE
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
// Add GROUP BY
if (this.groupByColumns.length > 0) {
sql += ` GROUP BY ${this.groupByColumns.join(', ')}`;
}
// Add HAVING
if (this.havingConditions.length > 0) {
sql += ' HAVING ' + this.buildWhereClause(this.havingConditions, params);
}
// Add ORDER BY
if (this.orderByConditions.length > 0) {
const orderBy = this.orderByConditions
.map(order => `${order.column} ${order.direction}`)
.join(', ');
sql += ` ORDER BY ${orderBy}`;
}
// Add LIMIT
if (this.limitCount !== null) {
sql += ` LIMIT $${params.length + 1}`;
params.push(this.limitCount);
}
// Add OFFSET
if (this.offsetCount !== null) {
sql += ` OFFSET $${params.length + 1}`;
params.push(this.offsetCount);
}
return sql;
}
private buildInsertQuery(params: any[]): string {
const columns = Object.keys(this.insertValues);
const placeholders = columns.map((_, i) => `$${params.length + i + 1}`);
params.push(...Object.values(this.insertValues));
return `INSERT INTO ${this.fromTable} (${columns.join(', ')}) VALUES (${placeholders.join(', ')})`;
}
private buildUpdateQuery(params: any[]): string {
const sets = Object.keys(this.updateValues).map((key, i) => {
return `${key} = $${params.length + i + 1}`;
});
params.push(...Object.values(this.updateValues));
let sql = `UPDATE ${this.fromTable} SET ${sets.join(', ')}`;
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
return sql;
}
private buildDeleteQuery(params: any[]): string {
let sql = `DELETE FROM ${this.fromTable}`;
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
return sql;
}
private buildWhereClause(conditions: WhereCondition[], params: any[]): string {
return conditions.map(condition => {
if (condition.operator === 'IS NULL' || condition.operator === 'IS NOT NULL') {
return `${condition.column} ${condition.operator}`;
} else {
params.push(condition.value);
return `${condition.column} ${condition.operator} $${params.length}`;
}
}).join(' AND ');
}
}

View file

@ -1,57 +1,57 @@
import { PoolClient } from 'pg';
import { getLogger } from '@stock-bot/logger';
import type { PostgreSQLClient } from './client';
import type { TransactionCallback } from './types';
/**
* PostgreSQL Transaction Manager
*
* Provides transaction support for multi-statement operations
*/
export class PostgreSQLTransactionManager {
private readonly client: PostgreSQLClient;
private readonly logger: ReturnType<typeof getLogger>;
constructor(client: PostgreSQLClient) {
this.client = client;
this.logger = getLogger('postgres-transaction-manager');
}
/**
* Execute operations within a transaction
*/
async execute<T>(callback: TransactionCallback<T>): Promise<T> {
const pool = this.client.connectionPool;
if (!pool) {
throw new Error('PostgreSQL client not connected');
}
const client = await pool.connect();
try {
this.logger.debug('Starting PostgreSQL transaction');
await client.query('BEGIN');
const result = await callback(client);
await client.query('COMMIT');
this.logger.debug('PostgreSQL transaction committed successfully');
return result;
} catch (error) {
this.logger.error('PostgreSQL transaction failed, rolling back:', error);
try {
await client.query('ROLLBACK');
} catch (rollbackError) {
this.logger.error('Failed to rollback transaction:', rollbackError);
}
throw error;
} finally {
client.release();
}
}
}
import { PoolClient } from 'pg';
import { getLogger } from '@stock-bot/logger';
import type { PostgreSQLClient } from './client';
import type { TransactionCallback } from './types';
/**
* PostgreSQL Transaction Manager
*
* Provides transaction support for multi-statement operations
*/
export class PostgreSQLTransactionManager {
private readonly client: PostgreSQLClient;
private readonly logger: ReturnType<typeof getLogger>;
constructor(client: PostgreSQLClient) {
this.client = client;
this.logger = getLogger('postgres-transaction-manager');
}
/**
* Execute operations within a transaction
*/
async execute<T>(callback: TransactionCallback<T>): Promise<T> {
const pool = this.client.connectionPool;
if (!pool) {
throw new Error('PostgreSQL client not connected');
}
const client = await pool.connect();
try {
this.logger.debug('Starting PostgreSQL transaction');
await client.query('BEGIN');
const result = await callback(client);
await client.query('COMMIT');
this.logger.debug('PostgreSQL transaction committed successfully');
return result;
} catch (error) {
this.logger.error('PostgreSQL transaction failed, rolling back:', error);
try {
await client.query('ROLLBACK');
} catch (rollbackError) {
this.logger.error('Failed to rollback transaction:', rollbackError);
}
throw error;
} finally {
client.release();
}
}
}

View file

@ -1,206 +1,206 @@
import type { Pool, PoolClient, QueryResult as PgQueryResult, QueryResultRow } from 'pg';
/**
* PostgreSQL Client Configuration
*/
export interface PostgreSQLClientConfig {
host: string;
port: number;
database: string;
username: string;
password: string;
poolSettings?: {
min: number;
max: number;
idleTimeoutMillis: number;
};
ssl?: {
enabled: boolean;
rejectUnauthorized: boolean;
};
timeouts?: {
query: number;
connection: number;
statement: number;
lock: number;
idleInTransaction: number;
};
}
/**
* PostgreSQL Connection Options
*/
export interface PostgreSQLConnectionOptions {
retryAttempts?: number;
retryDelay?: number;
healthCheckInterval?: number;
}
/**
* Health Status Types
*/
export type PostgreSQLHealthStatus = 'healthy' | 'degraded' | 'unhealthy';
export interface PostgreSQLHealthCheck {
status: PostgreSQLHealthStatus;
timestamp: Date;
latency: number;
connections: {
active: number;
idle: number;
total: number;
};
errors?: string[];
}
export interface PostgreSQLMetrics {
queriesPerSecond: number;
averageQueryTime: number;
errorRate: number;
connectionPoolUtilization: number;
slowQueries: number;
}
/**
* Query Result Types
*/
export interface QueryResult<T extends QueryResultRow = any> extends PgQueryResult<T> {
executionTime?: number;
}
export type TransactionCallback<T> = (client: PoolClient) => Promise<T>;
/**
* Schema and Table Names
*/
export type SchemaNames = 'trading' | 'strategy' | 'risk' | 'audit';
export type TableNames =
| 'trades'
| 'orders'
| 'positions'
| 'portfolios'
| 'strategies'
| 'risk_limits'
| 'audit_logs'
| 'users'
| 'accounts'
| 'symbols'
| 'exchanges';
/**
* Trading Domain Types
*/
export interface Trade {
id: string;
order_id: string;
symbol: string;
side: 'buy' | 'sell';
quantity: number;
price: number;
executed_at: Date;
commission: number;
fees: number;
portfolio_id: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
}
export interface Order {
id: string;
symbol: string;
side: 'buy' | 'sell';
type: 'market' | 'limit' | 'stop' | 'stop_limit';
quantity: number;
price?: number;
stop_price?: number;
status: 'pending' | 'filled' | 'cancelled' | 'rejected';
portfolio_id: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
expires_at?: Date;
}
export interface Position {
id: string;
symbol: string;
quantity: number;
average_cost: number;
market_value: number;
unrealized_pnl: number;
realized_pnl: number;
portfolio_id: string;
created_at: Date;
updated_at: Date;
}
export interface Portfolio {
id: string;
name: string;
cash_balance: number;
total_value: number;
unrealized_pnl: number;
realized_pnl: number;
user_id: string;
created_at: Date;
updated_at: Date;
}
export interface Strategy {
id: string;
name: string;
description: string;
parameters: Record<string, any>;
status: 'active' | 'inactive' | 'paused';
performance_metrics: Record<string, number>;
portfolio_id: string;
created_at: Date;
updated_at: Date;
}
export interface RiskLimit {
id: string;
type: 'position_size' | 'daily_loss' | 'max_drawdown' | 'concentration';
value: number;
threshold: number;
status: 'active' | 'breached' | 'disabled';
portfolio_id?: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
}
export interface AuditLog {
id: string;
action: string;
entity_type: string;
entity_id: string;
old_values?: Record<string, any>;
new_values?: Record<string, any>;
user_id?: string;
ip_address?: string;
user_agent?: string;
timestamp: Date;
}
/**
* Query Builder Types
*/
export interface WhereCondition {
column: string;
operator: '=' | '!=' | '>' | '<' | '>=' | '<=' | 'IN' | 'NOT IN' | 'LIKE' | 'ILIKE' | 'IS NULL' | 'IS NOT NULL';
value?: any;
}
export interface JoinCondition {
type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL';
table: string;
on: string;
}
export interface OrderByCondition {
column: string;
direction: 'ASC' | 'DESC';
}
import type { Pool, PoolClient, QueryResult as PgQueryResult, QueryResultRow } from 'pg';
/**
* PostgreSQL Client Configuration
*/
export interface PostgreSQLClientConfig {
host: string;
port: number;
database: string;
username: string;
password: string;
poolSettings?: {
min: number;
max: number;
idleTimeoutMillis: number;
};
ssl?: {
enabled: boolean;
rejectUnauthorized: boolean;
};
timeouts?: {
query: number;
connection: number;
statement: number;
lock: number;
idleInTransaction: number;
};
}
/**
* PostgreSQL Connection Options
*/
export interface PostgreSQLConnectionOptions {
retryAttempts?: number;
retryDelay?: number;
healthCheckInterval?: number;
}
/**
* Health Status Types
*/
export type PostgreSQLHealthStatus = 'healthy' | 'degraded' | 'unhealthy';
export interface PostgreSQLHealthCheck {
status: PostgreSQLHealthStatus;
timestamp: Date;
latency: number;
connections: {
active: number;
idle: number;
total: number;
};
errors?: string[];
}
export interface PostgreSQLMetrics {
queriesPerSecond: number;
averageQueryTime: number;
errorRate: number;
connectionPoolUtilization: number;
slowQueries: number;
}
/**
* Query Result Types
*/
export interface QueryResult<T extends QueryResultRow = any> extends PgQueryResult<T> {
executionTime?: number;
}
export type TransactionCallback<T> = (client: PoolClient) => Promise<T>;
/**
* Schema and Table Names
*/
export type SchemaNames = 'trading' | 'strategy' | 'risk' | 'audit';
export type TableNames =
| 'trades'
| 'orders'
| 'positions'
| 'portfolios'
| 'strategies'
| 'risk_limits'
| 'audit_logs'
| 'users'
| 'accounts'
| 'symbols'
| 'exchanges';
/**
* Trading Domain Types
*/
export interface Trade {
id: string;
order_id: string;
symbol: string;
side: 'buy' | 'sell';
quantity: number;
price: number;
executed_at: Date;
commission: number;
fees: number;
portfolio_id: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
}
export interface Order {
id: string;
symbol: string;
side: 'buy' | 'sell';
type: 'market' | 'limit' | 'stop' | 'stop_limit';
quantity: number;
price?: number;
stop_price?: number;
status: 'pending' | 'filled' | 'cancelled' | 'rejected';
portfolio_id: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
expires_at?: Date;
}
export interface Position {
id: string;
symbol: string;
quantity: number;
average_cost: number;
market_value: number;
unrealized_pnl: number;
realized_pnl: number;
portfolio_id: string;
created_at: Date;
updated_at: Date;
}
export interface Portfolio {
id: string;
name: string;
cash_balance: number;
total_value: number;
unrealized_pnl: number;
realized_pnl: number;
user_id: string;
created_at: Date;
updated_at: Date;
}
export interface Strategy {
id: string;
name: string;
description: string;
parameters: Record<string, any>;
status: 'active' | 'inactive' | 'paused';
performance_metrics: Record<string, number>;
portfolio_id: string;
created_at: Date;
updated_at: Date;
}
export interface RiskLimit {
id: string;
type: 'position_size' | 'daily_loss' | 'max_drawdown' | 'concentration';
value: number;
threshold: number;
status: 'active' | 'breached' | 'disabled';
portfolio_id?: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
}
export interface AuditLog {
id: string;
action: string;
entity_type: string;
entity_id: string;
old_values?: Record<string, any>;
new_values?: Record<string, any>;
user_id?: string;
ip_address?: string;
user_agent?: string;
timestamp: Date;
}
/**
* Query Builder Types
*/
export interface WhereCondition {
column: string;
operator: '=' | '!=' | '>' | '<' | '>=' | '<=' | 'IN' | 'NOT IN' | 'LIKE' | 'ILIKE' | 'IS NULL' | 'IS NOT NULL';
value?: any;
}
export interface JoinCondition {
type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL';
table: string;
on: string;
}
export interface OrderByCondition {
column: string;
direction: 'ASC' | 'DESC';
}

View file

@ -1,13 +1,13 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../config" },
{ "path": "../logger" }
]
}
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"outDir": "./dist",
"rootDir": "./src"
},
"include": ["src/**/*"],
"references": [
{ "path": "../types" },
{ "path": "../config" },
{ "path": "../logger" }
]
}

View file

@ -1,10 +1,10 @@
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build", "@stock-bot/logger#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}
{
"extends": ["//"],
"tasks": {
"build": {
"dependsOn": ["@stock-bot/types#build", "@stock-bot/config#build", "@stock-bot/logger#build"],
"outputs": ["dist/**"],
"inputs": ["src/**", "package.json", "tsconfig.json", "!**/*.test.ts", "!**/*.spec.ts", "!**/test/**", "!**/tests/**", "!**/__tests__/**"]
}
}
}

View file

@ -1,102 +1,102 @@
# QuestDB Client Library
A comprehensive QuestDB client library for the Stock Bot trading platform, optimized for time-series data, market analytics, and high-performance queries.
## Features
- **Time-Series Optimized**: Built specifically for time-series data patterns
- **Dual Protocol Support**: HTTP REST API and PostgreSQL wire protocol
- **InfluxDB Line Protocol**: High-performance data ingestion
- **SQL Analytics**: Full SQL support for complex analytics
- **Schema Management**: Automatic table creation and partitioning
- **Performance Monitoring**: Query performance tracking and optimization
- **Health Monitoring**: Connection health monitoring and metrics
## Usage
```typescript
import { QuestDBClient } from '@stock-bot/questdb-client';
// Initialize client
const questClient = new QuestDBClient();
await questClient.connect();
// Insert market data using InfluxDB Line Protocol
await questClient.insert('ohlcv', {
symbol: 'AAPL',
open: 150.00,
high: 152.00,
low: 149.50,
close: 151.50,
volume: 1000000,
timestamp: new Date()
});
// Query with SQL
const prices = await questClient.query(`
SELECT symbol, close, timestamp
FROM ohlcv
WHERE symbol = 'AAPL'
AND timestamp > dateadd('d', -1, now())
ORDER BY timestamp DESC
`);
// Time-series aggregations
const dailyStats = await questClient.aggregate('ohlcv')
.select(['symbol', 'avg(close) as avg_price'])
.where('symbol = ?', ['AAPL'])
.groupBy('symbol')
.sampleBy('1d', 'timestamp')
.execute();
```
## Data Types
The client provides typed access to the following time-series data:
- **ohlcv**: OHLCV candlestick data
- **trades**: Individual trade executions
- **quotes**: Bid/ask quote data
- **indicators**: Technical indicator values
- **performance**: Portfolio performance metrics
- **risk_metrics**: Risk calculation results
## Configuration
Configure using environment variables:
```env
QUESTDB_HOST=localhost
QUESTDB_HTTP_PORT=9000
QUESTDB_PG_PORT=8812
QUESTDB_INFLUX_PORT=9009
```
## Time-Series Features
QuestDB excels at:
- **High-frequency data**: Millions of data points per second
- **Time-based partitioning**: Automatic partitioning by time
- **ASOF JOINs**: Time-series specific joins
- **SAMPLE BY**: Time-based aggregations
- **LATEST BY**: Get latest values by key
## Performance
The client includes performance optimizations:
- Connection pooling for HTTP and PostgreSQL protocols
- Batch insertions for high throughput
- Compressed data transfer
- Query result caching
- Automatic schema optimization
## Health Monitoring
Built-in health monitoring:
```typescript
const health = await questClient.getHealth();
console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy'
```
# QuestDB Client Library
A comprehensive QuestDB client library for the Stock Bot trading platform, optimized for time-series data, market analytics, and high-performance queries.
## Features
- **Time-Series Optimized**: Built specifically for time-series data patterns
- **Dual Protocol Support**: HTTP REST API and PostgreSQL wire protocol
- **InfluxDB Line Protocol**: High-performance data ingestion
- **SQL Analytics**: Full SQL support for complex analytics
- **Schema Management**: Automatic table creation and partitioning
- **Performance Monitoring**: Query performance tracking and optimization
- **Health Monitoring**: Connection health monitoring and metrics
## Usage
```typescript
import { QuestDBClient } from '@stock-bot/questdb-client';
// Initialize client
const questClient = new QuestDBClient();
await questClient.connect();
// Insert market data using InfluxDB Line Protocol
await questClient.insert('ohlcv', {
symbol: 'AAPL',
open: 150.00,
high: 152.00,
low: 149.50,
close: 151.50,
volume: 1000000,
timestamp: new Date()
});
// Query with SQL
const prices = await questClient.query(`
SELECT symbol, close, timestamp
FROM ohlcv
WHERE symbol = 'AAPL'
AND timestamp > dateadd('d', -1, now())
ORDER BY timestamp DESC
`);
// Time-series aggregations
const dailyStats = await questClient.aggregate('ohlcv')
.select(['symbol', 'avg(close) as avg_price'])
.where('symbol = ?', ['AAPL'])
.groupBy('symbol')
.sampleBy('1d', 'timestamp')
.execute();
```
## Data Types
The client provides typed access to the following time-series data:
- **ohlcv**: OHLCV candlestick data
- **trades**: Individual trade executions
- **quotes**: Bid/ask quote data
- **indicators**: Technical indicator values
- **performance**: Portfolio performance metrics
- **risk_metrics**: Risk calculation results
## Configuration
Configure using environment variables:
```env
QUESTDB_HOST=localhost
QUESTDB_HTTP_PORT=9000
QUESTDB_PG_PORT=8812
QUESTDB_INFLUX_PORT=9009
```
## Time-Series Features
QuestDB excels at:
- **High-frequency data**: Millions of data points per second
- **Time-based partitioning**: Automatic partitioning by time
- **ASOF JOINs**: Time-series specific joins
- **SAMPLE BY**: Time-based aggregations
- **LATEST BY**: Get latest values by key
## Performance
The client includes performance optimizations:
- Connection pooling for HTTP and PostgreSQL protocols
- Batch insertions for high throughput
- Compressed data transfer
- Query result caching
- Automatic schema optimization
## Health Monitoring
Built-in health monitoring:
```typescript
const health = await questClient.getHealth();
console.log(health.status); // 'healthy' | 'degraded' | 'unhealthy'
```

View file

@ -1,14 +1,14 @@
# QuestDB Client Library Bun Test Configuration
[test]
# Configure path mapping for tests
preload = ["./test/setup.ts"]
# Test configuration
timeout = 5000
# Enable TypeScript paths resolution
[bun]
paths = {
"@/*" = ["./src/*"]
}
# QuestDB Client Library Bun Test Configuration
[test]
# Configure path mapping for tests
preload = ["./test/setup.ts"]
# Test configuration
timeout = 5000
# Enable TypeScript paths resolution
[bun]
paths = {
"@/*" = ["./src/*"]
}

View file

@ -1,45 +1,45 @@
{
"name": "@stock-bot/questdb-client",
"version": "1.0.0",
"description": "QuestDB client library for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/config": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"keywords": [
"questdb",
"database",
"client",
"stock-bot"
],
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}
{
"name": "@stock-bot/questdb-client",
"version": "1.0.0",
"description": "QuestDB client library for Stock Bot platform",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"type": "module",
"scripts": {
"build": "tsc",
"test": "bun test",
"lint": "eslint src/**/*.ts",
"type-check": "tsc --noEmit",
"clean": "rimraf dist"
},
"dependencies": {
"@stock-bot/config": "*",
"@stock-bot/logger": "*",
"@stock-bot/types": "*"
},
"devDependencies": {
"@types/node": "^20.11.0",
"typescript": "^5.3.0",
"eslint": "^8.56.0",
"@typescript-eslint/eslint-plugin": "^6.19.0",
"@typescript-eslint/parser": "^6.19.0",
"bun-types": "^1.2.15"
},
"keywords": [
"questdb",
"database",
"client",
"stock-bot"
],
"exports": {
".": {
"import": "./dist/index.js",
"require": "./dist/index.js",
"types": "./dist/index.d.ts"
}
},
"files": [
"dist",
"README.md"
]
}

View file

@ -1,471 +1,471 @@
import { Pool } from 'pg';
import { questdbConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import type {
QuestDBClientConfig,
QuestDBConnectionOptions,
QueryResult,
InsertResult,
BaseTimeSeriesData,
TableNames
} from './types';
import { QuestDBHealthMonitor } from './health';
import { QuestDBQueryBuilder } from './query-builder';
import { QuestDBInfluxWriter } from './influx-writer';
import { QuestDBSchemaManager } from './schema';
/**
* QuestDB Client for Stock Bot
*
* Provides high-performance time-series data access with support for
* multiple protocols (HTTP, PostgreSQL, InfluxDB Line Protocol).
*/
export class QuestDBClient {
private pgPool: Pool | null = null;
private readonly config: QuestDBClientConfig;
private readonly options: QuestDBConnectionOptions;
private readonly logger = getLogger('QuestDBClient');
private readonly healthMonitor: QuestDBHealthMonitor;
private readonly influxWriter: QuestDBInfluxWriter;
private readonly schemaManager: QuestDBSchemaManager;
private isConnected = false;
constructor(
config?: Partial<QuestDBClientConfig>,
options?: QuestDBConnectionOptions
) {
this.config = this.buildConfig(config);
this.options = {
protocol: 'pg',
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options
};
this.healthMonitor = new QuestDBHealthMonitor(this);
this.influxWriter = new QuestDBInfluxWriter(this);
this.schemaManager = new QuestDBSchemaManager(this);
}
/**
* Connect to QuestDB
*/
async connect(): Promise<void> {
if (this.isConnected) {
return;
}
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(`Connecting to QuestDB (attempt ${attempt}/${this.options.retryAttempts})...`);
// Connect via PostgreSQL wire protocol
this.pgPool = new Pool(this.buildPgPoolConfig());
// Test the connection
const client = await this.pgPool.connect();
await client.query('SELECT 1');
client.release();
this.isConnected = true;
this.logger.info('Successfully connected to QuestDB');
// Initialize schema
await this.schemaManager.initializeDatabase();
// Start health monitoring
this.healthMonitor.startMonitoring();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`QuestDB connection attempt ${attempt} failed:`, error);
if (this.pgPool) {
await this.pgPool.end();
this.pgPool = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(`Failed to connect to QuestDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`);
}
/**
* Disconnect from QuestDB
*/
async disconnect(): Promise<void> {
if (!this.isConnected) {
return;
} try {
this.healthMonitor.stopMonitoring();
if (this.pgPool) {
await this.pgPool.end();
this.pgPool = null;
}
this.isConnected = false;
this.logger.info('Disconnected from QuestDB');
} catch (error) {
this.logger.error('Error disconnecting from QuestDB:', error);
throw error;
}
}
/**
* Execute a SQL query
*/
async query<T = any>(sql: string, params?: any[]): Promise<QueryResult<T>> {
if (!this.pgPool) {
throw new Error('QuestDB client not connected');
}
const startTime = Date.now();
try {
const result = await this.pgPool.query(sql, params);
const executionTime = Date.now() - startTime;
this.logger.debug(`Query executed in ${executionTime}ms`, {
query: sql.substring(0, 100),
rowCount: result.rowCount
});
return {
rows: result.rows,
rowCount: result.rowCount || 0,
executionTime, metadata: {
columns: result.fields?.map((field: any) => ({
name: field.name,
type: this.mapDataType(field.dataTypeID)
})) || []
}
};
} catch (error) {
const executionTime = Date.now() - startTime;
this.logger.error(`Query failed after ${executionTime}ms:`, {
error: (error as Error).message,
query: sql,
params
});
throw error;
}
}
/**
* Write OHLCV data using InfluxDB Line Protocol
*/
async writeOHLCV(
symbol: string,
exchange: string,
data: Array<{
timestamp: Date;
open: number;
high: number;
low: number;
close: number;
volume: number;
}>
): Promise<void> {
return await this.influxWriter.writeOHLCV(symbol, exchange, data);
}
/**
* Write market analytics data
*/
async writeMarketAnalytics(
symbol: string,
exchange: string,
analytics: {
timestamp: Date;
rsi?: number;
macd?: number;
signal?: number;
histogram?: number;
bollinger_upper?: number;
bollinger_lower?: number;
volume_sma?: number;
}
): Promise<void> {
return await this.influxWriter.writeMarketAnalytics(symbol, exchange, analytics);
}
/**
* Get a query builder instance
*/
queryBuilder(): QuestDBQueryBuilder {
return new QuestDBQueryBuilder(this);
}
/**
* Create a SELECT query builder
*/
select(...columns: string[]): QuestDBQueryBuilder {
return this.queryBuilder().select(...columns);
}
/**
* Create an aggregation query builder
*/
aggregate(table: TableNames): QuestDBQueryBuilder {
return this.queryBuilder().from(table);
}
/**
* Execute a time-series specific query with SAMPLE BY
*/
async sampleBy<T = any>(
table: TableNames,
columns: string[],
interval: string,
timeColumn: string = 'timestamp',
where?: string,
params?: any[]
): Promise<QueryResult<T>> {
const columnsStr = columns.join(', ');
const whereClause = where ? `WHERE ${where}` : '';
const sql = `
SELECT ${columnsStr}
FROM ${table}
${whereClause}
SAMPLE BY ${interval}
ALIGN TO CALENDAR
`;
return await this.query<T>(sql, params);
}
/**
* Get latest values by symbol using LATEST BY
*/
async latestBy<T = any>(
table: TableNames,
columns: string | string[] = '*',
keyColumns: string | string[] = 'symbol'
): Promise<QueryResult<T>> {
const columnsStr = Array.isArray(columns) ? columns.join(', ') : columns;
const keyColumnsStr = Array.isArray(keyColumns) ? keyColumns.join(', ') : keyColumns;
const sql = `
SELECT ${columnsStr}
FROM ${table}
LATEST BY ${keyColumnsStr}
`;
return await this.query<T>(sql);
}
/**
* Execute ASOF JOIN for time-series correlation
*/
async asofJoin<T = any>(
leftTable: TableNames,
rightTable: TableNames,
joinCondition: string,
columns?: string[],
where?: string,
params?: any[]
): Promise<QueryResult<T>> {
const columnsStr = columns ? columns.join(', ') : '*';
const whereClause = where ? `WHERE ${where}` : '';
const sql = `
SELECT ${columnsStr}
FROM ${leftTable}
ASOF JOIN ${rightTable} ON ${joinCondition}
${whereClause}
`;
return await this.query<T>(sql, params);
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
const result = await this.query(`
SELECT
table_name,
row_count,
partition_count,
size_bytes
FROM tables()
WHERE table_name NOT LIKE 'sys.%'
ORDER BY row_count DESC
`);
return result.rows;
}
/**
* Get table information
*/
async getTableInfo(tableName: string): Promise<any> {
const result = await this.query(
`SELECT * FROM table_columns WHERE table_name = ?`,
[tableName]
);
return result.rows;
}
/**
* Check if PostgreSQL pool is healthy
*/
isPgPoolHealthy(): boolean {
return this.pgPool !== null && !this.pgPool.ended;
}
/**
* Get HTTP endpoint URL
*/
getHttpUrl(): string {
const protocol = this.config.tls?.enabled ? 'https' : 'http';
return `${protocol}://${this.config.host}:${this.config.httpPort}`;
}
/**
* Get InfluxDB endpoint URL
*/
getInfluxUrl(): string {
const protocol = this.config.tls?.enabled ? 'https' : 'http';
return `${protocol}://${this.config.host}:${this.config.influxPort}`;
}
/**
* Get health monitor instance
*/
getHealthMonitor(): QuestDBHealthMonitor {
return this.healthMonitor;
}
/**
* Get schema manager instance
*/
getSchemaManager(): QuestDBSchemaManager {
return this.schemaManager;
}
/**
* Get InfluxDB writer instance
*/
getInfluxWriter(): QuestDBInfluxWriter {
return this.influxWriter;
}
/**
* Optimize table by rebuilding partitions
*/
async optimizeTable(tableName: string): Promise<void> {
await this.query(`VACUUM TABLE ${tableName}`);
this.logger.info(`Optimized table: ${tableName}`);
}
/**
* Create a table with time-series optimizations
*/
async createTable(
tableName: string,
columns: string,
partitionBy: string = 'DAY',
timestampColumn: string = 'timestamp'
): Promise<void> {
const sql = `
CREATE TABLE IF NOT EXISTS ${tableName} (
${columns}
) TIMESTAMP(${timestampColumn}) PARTITION BY ${partitionBy}
`;
await this.query(sql);
this.logger.info(`Created table: ${tableName}`);
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.pgPool;
}
/**
* Get the PostgreSQL connection pool
*/
get connectionPool(): Pool | null {
return this.pgPool;
}
/**
* Get configuration
*/
get configuration(): QuestDBClientConfig {
return { ...this.config };
}
private buildConfig(config?: Partial<QuestDBClientConfig>): QuestDBClientConfig {
return {
host: config?.host || questdbConfig.QUESTDB_HOST,
httpPort: config?.httpPort || questdbConfig.QUESTDB_HTTP_PORT,
pgPort: config?.pgPort || questdbConfig.QUESTDB_PG_PORT,
influxPort: config?.influxPort || questdbConfig.QUESTDB_INFLUX_PORT,
user: config?.user || questdbConfig.QUESTDB_USER,
password: config?.password || questdbConfig.QUESTDB_PASSWORD,
database: config?.database || questdbConfig.QUESTDB_DEFAULT_DATABASE,
tls: {
enabled: questdbConfig.QUESTDB_TLS_ENABLED,
verifyServerCert: questdbConfig.QUESTDB_TLS_VERIFY_SERVER_CERT,
...config?.tls
},
timeouts: {
connection: questdbConfig.QUESTDB_CONNECTION_TIMEOUT,
request: questdbConfig.QUESTDB_REQUEST_TIMEOUT,
...config?.timeouts
},
retryAttempts: questdbConfig.QUESTDB_RETRY_ATTEMPTS,
...config
};
}
private buildPgPoolConfig(): any {
return {
host: this.config.host,
port: this.config.pgPort,
database: this.config.database,
user: this.config.user,
password: this.config.password,
connectionTimeoutMillis: this.config.timeouts?.connection,
query_timeout: this.config.timeouts?.request,
ssl: this.config.tls?.enabled ? {
rejectUnauthorized: this.config.tls.verifyServerCert
} : false,
min: 2,
max: 10
};
}
private mapDataType(typeId: number): string {
// Map PostgreSQL type IDs to QuestDB types
const typeMap: Record<number, string> = {
16: 'BOOLEAN',
20: 'LONG',
21: 'INT',
23: 'INT',
25: 'STRING',
700: 'FLOAT',
701: 'DOUBLE',
1043: 'STRING',
1082: 'DATE',
1114: 'TIMESTAMP',
1184: 'TIMESTAMP'
};
return typeMap[typeId] || 'STRING';
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}
import { Pool } from 'pg';
import { questdbConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import type {
QuestDBClientConfig,
QuestDBConnectionOptions,
QueryResult,
InsertResult,
BaseTimeSeriesData,
TableNames
} from './types';
import { QuestDBHealthMonitor } from './health';
import { QuestDBQueryBuilder } from './query-builder';
import { QuestDBInfluxWriter } from './influx-writer';
import { QuestDBSchemaManager } from './schema';
/**
* QuestDB Client for Stock Bot
*
* Provides high-performance time-series data access with support for
* multiple protocols (HTTP, PostgreSQL, InfluxDB Line Protocol).
*/
export class QuestDBClient {
private pgPool: Pool | null = null;
private readonly config: QuestDBClientConfig;
private readonly options: QuestDBConnectionOptions;
private readonly logger = getLogger('QuestDBClient');
private readonly healthMonitor: QuestDBHealthMonitor;
private readonly influxWriter: QuestDBInfluxWriter;
private readonly schemaManager: QuestDBSchemaManager;
private isConnected = false;
constructor(
config?: Partial<QuestDBClientConfig>,
options?: QuestDBConnectionOptions
) {
this.config = this.buildConfig(config);
this.options = {
protocol: 'pg',
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options
};
this.healthMonitor = new QuestDBHealthMonitor(this);
this.influxWriter = new QuestDBInfluxWriter(this);
this.schemaManager = new QuestDBSchemaManager(this);
}
/**
* Connect to QuestDB
*/
async connect(): Promise<void> {
if (this.isConnected) {
return;
}
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(`Connecting to QuestDB (attempt ${attempt}/${this.options.retryAttempts})...`);
// Connect via PostgreSQL wire protocol
this.pgPool = new Pool(this.buildPgPoolConfig());
// Test the connection
const client = await this.pgPool.connect();
await client.query('SELECT 1');
client.release();
this.isConnected = true;
this.logger.info('Successfully connected to QuestDB');
// Initialize schema
await this.schemaManager.initializeDatabase();
// Start health monitoring
this.healthMonitor.startMonitoring();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`QuestDB connection attempt ${attempt} failed:`, error);
if (this.pgPool) {
await this.pgPool.end();
this.pgPool = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(`Failed to connect to QuestDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`);
}
/**
* Disconnect from QuestDB
*/
async disconnect(): Promise<void> {
if (!this.isConnected) {
return;
} try {
this.healthMonitor.stopMonitoring();
if (this.pgPool) {
await this.pgPool.end();
this.pgPool = null;
}
this.isConnected = false;
this.logger.info('Disconnected from QuestDB');
} catch (error) {
this.logger.error('Error disconnecting from QuestDB:', error);
throw error;
}
}
/**
* Execute a SQL query
*/
async query<T = any>(sql: string, params?: any[]): Promise<QueryResult<T>> {
if (!this.pgPool) {
throw new Error('QuestDB client not connected');
}
const startTime = Date.now();
try {
const result = await this.pgPool.query(sql, params);
const executionTime = Date.now() - startTime;
this.logger.debug(`Query executed in ${executionTime}ms`, {
query: sql.substring(0, 100),
rowCount: result.rowCount
});
return {
rows: result.rows,
rowCount: result.rowCount || 0,
executionTime, metadata: {
columns: result.fields?.map((field: any) => ({
name: field.name,
type: this.mapDataType(field.dataTypeID)
})) || []
}
};
} catch (error) {
const executionTime = Date.now() - startTime;
this.logger.error(`Query failed after ${executionTime}ms:`, {
error: (error as Error).message,
query: sql,
params
});
throw error;
}
}
/**
* Write OHLCV data using InfluxDB Line Protocol
*/
async writeOHLCV(
symbol: string,
exchange: string,
data: Array<{
timestamp: Date;
open: number;
high: number;
low: number;
close: number;
volume: number;
}>
): Promise<void> {
return await this.influxWriter.writeOHLCV(symbol, exchange, data);
}
/**
* Write market analytics data
*/
async writeMarketAnalytics(
symbol: string,
exchange: string,
analytics: {
timestamp: Date;
rsi?: number;
macd?: number;
signal?: number;
histogram?: number;
bollinger_upper?: number;
bollinger_lower?: number;
volume_sma?: number;
}
): Promise<void> {
return await this.influxWriter.writeMarketAnalytics(symbol, exchange, analytics);
}
/**
* Get a query builder instance
*/
queryBuilder(): QuestDBQueryBuilder {
return new QuestDBQueryBuilder(this);
}
/**
* Create a SELECT query builder
*/
select(...columns: string[]): QuestDBQueryBuilder {
return this.queryBuilder().select(...columns);
}
/**
* Create an aggregation query builder
*/
aggregate(table: TableNames): QuestDBQueryBuilder {
return this.queryBuilder().from(table);
}
/**
* Execute a time-series specific query with SAMPLE BY
*/
async sampleBy<T = any>(
table: TableNames,
columns: string[],
interval: string,
timeColumn: string = 'timestamp',
where?: string,
params?: any[]
): Promise<QueryResult<T>> {
const columnsStr = columns.join(', ');
const whereClause = where ? `WHERE ${where}` : '';
const sql = `
SELECT ${columnsStr}
FROM ${table}
${whereClause}
SAMPLE BY ${interval}
ALIGN TO CALENDAR
`;
return await this.query<T>(sql, params);
}
/**
* Get latest values by symbol using LATEST BY
*/
async latestBy<T = any>(
table: TableNames,
columns: string | string[] = '*',
keyColumns: string | string[] = 'symbol'
): Promise<QueryResult<T>> {
const columnsStr = Array.isArray(columns) ? columns.join(', ') : columns;
const keyColumnsStr = Array.isArray(keyColumns) ? keyColumns.join(', ') : keyColumns;
const sql = `
SELECT ${columnsStr}
FROM ${table}
LATEST BY ${keyColumnsStr}
`;
return await this.query<T>(sql);
}
/**
* Execute ASOF JOIN for time-series correlation
*/
async asofJoin<T = any>(
leftTable: TableNames,
rightTable: TableNames,
joinCondition: string,
columns?: string[],
where?: string,
params?: any[]
): Promise<QueryResult<T>> {
const columnsStr = columns ? columns.join(', ') : '*';
const whereClause = where ? `WHERE ${where}` : '';
const sql = `
SELECT ${columnsStr}
FROM ${leftTable}
ASOF JOIN ${rightTable} ON ${joinCondition}
${whereClause}
`;
return await this.query<T>(sql, params);
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
const result = await this.query(`
SELECT
table_name,
row_count,
partition_count,
size_bytes
FROM tables()
WHERE table_name NOT LIKE 'sys.%'
ORDER BY row_count DESC
`);
return result.rows;
}
/**
* Get table information
*/
async getTableInfo(tableName: string): Promise<any> {
const result = await this.query(
`SELECT * FROM table_columns WHERE table_name = ?`,
[tableName]
);
return result.rows;
}
/**
* Check if PostgreSQL pool is healthy
*/
isPgPoolHealthy(): boolean {
return this.pgPool !== null && !this.pgPool.ended;
}
/**
* Get HTTP endpoint URL
*/
getHttpUrl(): string {
const protocol = this.config.tls?.enabled ? 'https' : 'http';
return `${protocol}://${this.config.host}:${this.config.httpPort}`;
}
/**
* Get InfluxDB endpoint URL
*/
getInfluxUrl(): string {
const protocol = this.config.tls?.enabled ? 'https' : 'http';
return `${protocol}://${this.config.host}:${this.config.influxPort}`;
}
/**
* Get health monitor instance
*/
getHealthMonitor(): QuestDBHealthMonitor {
return this.healthMonitor;
}
/**
* Get schema manager instance
*/
getSchemaManager(): QuestDBSchemaManager {
return this.schemaManager;
}
/**
* Get InfluxDB writer instance
*/
getInfluxWriter(): QuestDBInfluxWriter {
return this.influxWriter;
}
/**
* Optimize table by rebuilding partitions
*/
async optimizeTable(tableName: string): Promise<void> {
await this.query(`VACUUM TABLE ${tableName}`);
this.logger.info(`Optimized table: ${tableName}`);
}
/**
* Create a table with time-series optimizations
*/
async createTable(
tableName: string,
columns: string,
partitionBy: string = 'DAY',
timestampColumn: string = 'timestamp'
): Promise<void> {
const sql = `
CREATE TABLE IF NOT EXISTS ${tableName} (
${columns}
) TIMESTAMP(${timestampColumn}) PARTITION BY ${partitionBy}
`;
await this.query(sql);
this.logger.info(`Created table: ${tableName}`);
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.pgPool;
}
/**
* Get the PostgreSQL connection pool
*/
get connectionPool(): Pool | null {
return this.pgPool;
}
/**
* Get configuration
*/
get configuration(): QuestDBClientConfig {
return { ...this.config };
}
private buildConfig(config?: Partial<QuestDBClientConfig>): QuestDBClientConfig {
return {
host: config?.host || questdbConfig.QUESTDB_HOST,
httpPort: config?.httpPort || questdbConfig.QUESTDB_HTTP_PORT,
pgPort: config?.pgPort || questdbConfig.QUESTDB_PG_PORT,
influxPort: config?.influxPort || questdbConfig.QUESTDB_INFLUX_PORT,
user: config?.user || questdbConfig.QUESTDB_USER,
password: config?.password || questdbConfig.QUESTDB_PASSWORD,
database: config?.database || questdbConfig.QUESTDB_DEFAULT_DATABASE,
tls: {
enabled: questdbConfig.QUESTDB_TLS_ENABLED,
verifyServerCert: questdbConfig.QUESTDB_TLS_VERIFY_SERVER_CERT,
...config?.tls
},
timeouts: {
connection: questdbConfig.QUESTDB_CONNECTION_TIMEOUT,
request: questdbConfig.QUESTDB_REQUEST_TIMEOUT,
...config?.timeouts
},
retryAttempts: questdbConfig.QUESTDB_RETRY_ATTEMPTS,
...config
};
}
private buildPgPoolConfig(): any {
return {
host: this.config.host,
port: this.config.pgPort,
database: this.config.database,
user: this.config.user,
password: this.config.password,
connectionTimeoutMillis: this.config.timeouts?.connection,
query_timeout: this.config.timeouts?.request,
ssl: this.config.tls?.enabled ? {
rejectUnauthorized: this.config.tls.verifyServerCert
} : false,
min: 2,
max: 10
};
}
private mapDataType(typeId: number): string {
// Map PostgreSQL type IDs to QuestDB types
const typeMap: Record<number, string> = {
16: 'BOOLEAN',
20: 'LONG',
21: 'INT',
23: 'INT',
25: 'STRING',
700: 'FLOAT',
701: 'DOUBLE',
1043: 'STRING',
1082: 'DATE',
1114: 'TIMESTAMP',
1184: 'TIMESTAMP'
};
return typeMap[typeId] || 'STRING';
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}

View file

@ -1,63 +1,63 @@
import { QuestDBClient } from './client';
import { questdbConfig } from '@stock-bot/config';
import type { QuestDBClientConfig, QuestDBConnectionOptions } from './types';
/**
* Factory function to create a QuestDB client instance
*/
export function createQuestDBClient(
config?: Partial<QuestDBClientConfig>,
options?: QuestDBConnectionOptions
): QuestDBClient {
return new QuestDBClient(config, options);
}
/**
* Create a QuestDB client with default configuration
*/
export function createDefaultQuestDBClient(): QuestDBClient {
const config: Partial<QuestDBClientConfig> = {
host: questdbConfig.QUESTDB_HOST,
httpPort: questdbConfig.QUESTDB_HTTP_PORT,
pgPort: questdbConfig.QUESTDB_PG_PORT,
influxPort: questdbConfig.QUESTDB_INFLUX_PORT,
user: questdbConfig.QUESTDB_USER,
password: questdbConfig.QUESTDB_PASSWORD
};
return new QuestDBClient(config);
}
/**
* Singleton QuestDB client instance
*/
let defaultClient: QuestDBClient | null = null;
/**
* Get or create the default QuestDB client instance
*/
export function getQuestDBClient(): QuestDBClient {
if (!defaultClient) {
defaultClient = createDefaultQuestDBClient();
}
return defaultClient;
}
/**
* Connect to QuestDB using the default client
*/
export async function connectQuestDB(): Promise<QuestDBClient> {
const client = getQuestDBClient();
await client.connect();
return client;
}
/**
* Disconnect from QuestDB
*/
export async function disconnectQuestDB(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}
import { QuestDBClient } from './client';
import { questdbConfig } from '@stock-bot/config';
import type { QuestDBClientConfig, QuestDBConnectionOptions } from './types';
/**
* Factory function to create a QuestDB client instance
*/
export function createQuestDBClient(
config?: Partial<QuestDBClientConfig>,
options?: QuestDBConnectionOptions
): QuestDBClient {
return new QuestDBClient(config, options);
}
/**
* Create a QuestDB client with default configuration
*/
export function createDefaultQuestDBClient(): QuestDBClient {
const config: Partial<QuestDBClientConfig> = {
host: questdbConfig.QUESTDB_HOST,
httpPort: questdbConfig.QUESTDB_HTTP_PORT,
pgPort: questdbConfig.QUESTDB_PG_PORT,
influxPort: questdbConfig.QUESTDB_INFLUX_PORT,
user: questdbConfig.QUESTDB_USER,
password: questdbConfig.QUESTDB_PASSWORD
};
return new QuestDBClient(config);
}
/**
* Singleton QuestDB client instance
*/
let defaultClient: QuestDBClient | null = null;
/**
* Get or create the default QuestDB client instance
*/
export function getQuestDBClient(): QuestDBClient {
if (!defaultClient) {
defaultClient = createDefaultQuestDBClient();
}
return defaultClient;
}
/**
* Connect to QuestDB using the default client
*/
export async function connectQuestDB(): Promise<QuestDBClient> {
const client = getQuestDBClient();
await client.connect();
return client;
}
/**
* Disconnect from QuestDB
*/
export async function disconnectQuestDB(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}

Some files were not shown because too many files have changed in this diff Show more