running prettier for cleanup

This commit is contained in:
Boki 2025-06-11 10:13:25 -04:00
parent 24b7ed15e4
commit 8955544593
151 changed files with 29158 additions and 27966 deletions

View file

@ -1,6 +1,6 @@
import Redis from 'ioredis';
import { getLogger } from '@stock-bot/logger';
import { dragonflyConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
interface ConnectionConfig {
name: string;
@ -33,7 +33,7 @@ export class RedisConnectionManager {
*/
getConnection(config: ConnectionConfig): Redis {
const { name, singleton = false, db } = config;
if (singleton) {
// Use shared connection across all instances
if (!RedisConnectionManager.sharedConnections.has(name)) {
@ -66,7 +66,9 @@ export class RedisConnectionManager {
retryDelayOnFailover: dragonflyConfig.DRAGONFLY_RETRY_DELAY,
connectTimeout: dragonflyConfig.DRAGONFLY_CONNECT_TIMEOUT,
commandTimeout: dragonflyConfig.DRAGONFLY_COMMAND_TIMEOUT,
keepAlive: dragonflyConfig.DRAGONFLY_ENABLE_KEEPALIVE ? dragonflyConfig.DRAGONFLY_KEEPALIVE_INTERVAL * 1000 : 0,
keepAlive: dragonflyConfig.DRAGONFLY_ENABLE_KEEPALIVE
? dragonflyConfig.DRAGONFLY_KEEPALIVE_INTERVAL * 1000
: 0,
connectionName: name,
lazyConnect: false, // Connect immediately instead of waiting for first command
...(dragonflyConfig.DRAGONFLY_TLS && {
@ -90,7 +92,7 @@ export class RedisConnectionManager {
this.logger.info(`Redis connection ready: ${name}`);
});
redis.on('error', (err) => {
redis.on('error', err => {
this.logger.error(`Redis connection error for ${name}:`, err);
});
@ -121,7 +123,7 @@ export class RedisConnectionManager {
*/
async closeAllConnections(): Promise<void> {
// Close instance-specific connections
const instancePromises = Array.from(this.connections.values()).map(conn =>
const instancePromises = Array.from(this.connections.values()).map(conn =>
this.closeConnection(conn)
);
await Promise.all(instancePromises);
@ -129,8 +131,8 @@ export class RedisConnectionManager {
// Close shared connections (only if this is the last instance)
if (RedisConnectionManager.instance === this) {
const sharedPromises = Array.from(RedisConnectionManager.sharedConnections.values()).map(conn =>
this.closeConnection(conn)
const sharedPromises = Array.from(RedisConnectionManager.sharedConnections.values()).map(
conn => this.closeConnection(conn)
);
await Promise.all(sharedPromises);
RedisConnectionManager.sharedConnections.clear();
@ -145,7 +147,7 @@ export class RedisConnectionManager {
getConnectionCount(): { shared: number; unique: number } {
return {
shared: RedisConnectionManager.sharedConnections.size,
unique: this.connections.size
unique: this.connections.size,
};
}
@ -155,7 +157,7 @@ export class RedisConnectionManager {
getConnectionNames(): { shared: string[]; unique: string[] } {
return {
shared: Array.from(RedisConnectionManager.sharedConnections.keys()),
unique: Array.from(this.connections.keys())
unique: Array.from(this.connections.keys()),
};
}
@ -198,10 +200,7 @@ export class RedisConnectionManager {
*/
static async waitForAllConnections(timeout: number = 30000): Promise<void> {
const instance = this.getInstance();
const allConnections = new Map([
...instance.connections,
...this.sharedConnections
]);
const allConnections = new Map([...instance.connections, ...this.sharedConnections]);
if (allConnections.size === 0) {
instance.logger.info('No Redis connections to wait for');
@ -210,7 +209,7 @@ export class RedisConnectionManager {
instance.logger.info(`Waiting for ${allConnections.size} Redis connections to be ready...`);
const connectionPromises = Array.from(allConnections.entries()).map(([name, redis]) =>
const connectionPromises = Array.from(allConnections.entries()).map(([name, redis]) =>
instance.waitForConnection(redis, name, timeout)
);
@ -259,15 +258,12 @@ export class RedisConnectionManager {
*/
static areAllConnectionsReady(): boolean {
const instance = this.getInstance();
const allConnections = new Map([
...instance.connections,
...this.sharedConnections
]);
return allConnections.size > 0 &&
Array.from(allConnections.keys()).every(name =>
this.readyConnections.has(name)
);
const allConnections = new Map([...instance.connections, ...this.sharedConnections]);
return (
allConnections.size > 0 &&
Array.from(allConnections.keys()).every(name => this.readyConnections.has(name))
);
}
}

View file

@ -1,92 +1,91 @@
import { RedisCache } from './redis-cache';
import { RedisConnectionManager } from './connection-manager';
import type { CacheProvider, CacheOptions } from './types';
// Cache instances registry to prevent multiple instances with same prefix
const cacheInstances = new Map<string, CacheProvider>();
/**
* Create a Redis cache instance with trading-optimized defaults
*/
export function createCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'cache:',
ttl: 3600, // 1 hour default
enableMetrics: true,
shared: true, // Default to shared connections
...options
};
// For shared connections, reuse cache instances with the same key prefix
if (defaultOptions.shared) {
const cacheKey = `${defaultOptions.keyPrefix}-${defaultOptions.ttl}`;
if (cacheInstances.has(cacheKey)) {
return cacheInstances.get(cacheKey)!;
}
const cache = new RedisCache(defaultOptions);
cacheInstances.set(cacheKey, cache);
return cache;
}
// For non-shared connections, always create new instances
return new RedisCache(defaultOptions);
}
/**
* Create a cache instance for trading data
*/
export function createTradingCache(options: Partial<CacheOptions> = {}): CacheProvider {
return createCache({
keyPrefix: 'trading:',
ttl: 3600, // 1 hour default
enableMetrics: true,
shared: true,
...options
});
}
/**
* Create a cache for market data with shorter TTL
*/
export function createMarketDataCache(options: Partial<CacheOptions> = {}): CacheProvider {
return createCache({
keyPrefix: 'market:',
ttl: 300, // 5 minutes for market data
enableMetrics: true,
shared: true,
...options
});
}
/**
* Create a cache for indicators with longer TTL
*/
export function createIndicatorCache(options: Partial<CacheOptions> = {}): CacheProvider {
return createCache({
keyPrefix: 'indicators:',
ttl: 1800, // 30 minutes for indicators
enableMetrics: true,
shared: true,
...options
});
}
// Export types and classes
export type {
CacheProvider,
CacheOptions,
CacheConfig,
CacheStats,
CacheKey,
SerializationOptions
} from './types';
export { RedisCache } from './redis-cache';
export { RedisConnectionManager } from './connection-manager';
export { CacheKeyGenerator } from './key-generator';
// Default export for convenience
export default createCache;
import { RedisConnectionManager } from './connection-manager';
import { RedisCache } from './redis-cache';
import type { CacheOptions, CacheProvider } from './types';
// Cache instances registry to prevent multiple instances with same prefix
const cacheInstances = new Map<string, CacheProvider>();
/**
* Create a Redis cache instance with trading-optimized defaults
*/
export function createCache(options: Partial<CacheOptions> = {}): CacheProvider {
const defaultOptions: CacheOptions = {
keyPrefix: 'cache:',
ttl: 3600, // 1 hour default
enableMetrics: true,
shared: true, // Default to shared connections
...options,
};
// For shared connections, reuse cache instances with the same key prefix
if (defaultOptions.shared) {
const cacheKey = `${defaultOptions.keyPrefix}-${defaultOptions.ttl}`;
if (cacheInstances.has(cacheKey)) {
return cacheInstances.get(cacheKey)!;
}
const cache = new RedisCache(defaultOptions);
cacheInstances.set(cacheKey, cache);
return cache;
}
// For non-shared connections, always create new instances
return new RedisCache(defaultOptions);
}
/**
* Create a cache instance for trading data
*/
export function createTradingCache(options: Partial<CacheOptions> = {}): CacheProvider {
return createCache({
keyPrefix: 'trading:',
ttl: 3600, // 1 hour default
enableMetrics: true,
shared: true,
...options,
});
}
/**
* Create a cache for market data with shorter TTL
*/
export function createMarketDataCache(options: Partial<CacheOptions> = {}): CacheProvider {
return createCache({
keyPrefix: 'market:',
ttl: 300, // 5 minutes for market data
enableMetrics: true,
shared: true,
...options,
});
}
/**
* Create a cache for indicators with longer TTL
*/
export function createIndicatorCache(options: Partial<CacheOptions> = {}): CacheProvider {
return createCache({
keyPrefix: 'indicators:',
ttl: 1800, // 30 minutes for indicators
enableMetrics: true,
shared: true,
...options,
});
}
// Export types and classes
export type {
CacheProvider,
CacheOptions,
CacheConfig,
CacheStats,
CacheKey,
SerializationOptions,
} from './types';
export { RedisCache } from './redis-cache';
export { RedisConnectionManager } from './connection-manager';
export { CacheKeyGenerator } from './key-generator';
// Default export for convenience
export default createCache;

View file

@ -1,73 +1,73 @@
export class CacheKeyGenerator {
/**
* Generate cache key for market data
*/
static marketData(symbol: string, timeframe: string, date?: Date): string {
const dateStr = date ? date.toISOString().split('T')[0] : 'latest';
return `market:${symbol.toLowerCase()}:${timeframe}:${dateStr}`;
}
/**
* Generate cache key for technical indicators
*/
static indicator(symbol: string, indicator: string, period: number, dataHash: string): string {
return `indicator:${symbol.toLowerCase()}:${indicator}:${period}:${dataHash}`;
}
/**
* Generate cache key for backtest results
*/
static backtest(strategyName: string, params: Record<string, any>): string {
const paramHash = this.hashObject(params);
return `backtest:${strategyName}:${paramHash}`;
}
/**
* Generate cache key for strategy results
*/
static strategy(strategyName: string, symbol: string, timeframe: string): string {
return `strategy:${strategyName}:${symbol.toLowerCase()}:${timeframe}`;
}
/**
* Generate cache key for user sessions
*/
static userSession(userId: string): string {
return `session:${userId}`;
}
/**
* Generate cache key for portfolio data
*/
static portfolio(userId: string, portfolioId: string): string {
return `portfolio:${userId}:${portfolioId}`;
}
/**
* Generate cache key for real-time prices
*/
static realtimePrice(symbol: string): string {
return `price:realtime:${symbol.toLowerCase()}`;
}
/**
* Generate cache key for order book data
*/
static orderBook(symbol: string, depth: number = 10): string {
return `orderbook:${symbol.toLowerCase()}:${depth}`;
}
/**
* Create a simple hash from object for cache keys
*/
private static hashObject(obj: Record<string, any>): string {
const str = JSON.stringify(obj, Object.keys(obj).sort());
let hash = 0;
for (let i = 0; i < str.length; i++) {
const char = str.charCodeAt(i);
hash = ((hash << 5) - hash) + char;
hash = hash & hash; // Convert to 32-bit integer
}
return Math.abs(hash).toString(36);
}
}
export class CacheKeyGenerator {
/**
* Generate cache key for market data
*/
static marketData(symbol: string, timeframe: string, date?: Date): string {
const dateStr = date ? date.toISOString().split('T')[0] : 'latest';
return `market:${symbol.toLowerCase()}:${timeframe}:${dateStr}`;
}
/**
* Generate cache key for technical indicators
*/
static indicator(symbol: string, indicator: string, period: number, dataHash: string): string {
return `indicator:${symbol.toLowerCase()}:${indicator}:${period}:${dataHash}`;
}
/**
* Generate cache key for backtest results
*/
static backtest(strategyName: string, params: Record<string, any>): string {
const paramHash = this.hashObject(params);
return `backtest:${strategyName}:${paramHash}`;
}
/**
* Generate cache key for strategy results
*/
static strategy(strategyName: string, symbol: string, timeframe: string): string {
return `strategy:${strategyName}:${symbol.toLowerCase()}:${timeframe}`;
}
/**
* Generate cache key for user sessions
*/
static userSession(userId: string): string {
return `session:${userId}`;
}
/**
* Generate cache key for portfolio data
*/
static portfolio(userId: string, portfolioId: string): string {
return `portfolio:${userId}:${portfolioId}`;
}
/**
* Generate cache key for real-time prices
*/
static realtimePrice(symbol: string): string {
return `price:realtime:${symbol.toLowerCase()}`;
}
/**
* Generate cache key for order book data
*/
static orderBook(symbol: string, depth: number = 10): string {
return `orderbook:${symbol.toLowerCase()}:${depth}`;
}
/**
* Create a simple hash from object for cache keys
*/
private static hashObject(obj: Record<string, any>): string {
const str = JSON.stringify(obj, Object.keys(obj).sort());
let hash = 0;
for (let i = 0; i < str.length; i++) {
const char = str.charCodeAt(i);
hash = (hash << 5) - hash + char;
hash = hash & hash; // Convert to 32-bit integer
}
return Math.abs(hash).toString(36);
}
}

View file

@ -1,7 +1,7 @@
import Redis from 'ioredis';
import { getLogger } from '@stock-bot/logger';
import { CacheProvider, CacheOptions, CacheStats } from './types';
import { RedisConnectionManager } from './connection-manager';
import { CacheOptions, CacheProvider, CacheStats } from './types';
/**
* Simplified Redis-based cache provider using connection manager
@ -15,27 +15,33 @@ export class RedisCache implements CacheProvider {
private isConnected = false;
private startTime = Date.now();
private connectionManager: RedisConnectionManager;
private stats: CacheStats = {
hits: 0,
misses: 0,
errors: 0,
hitRate: 0,
total: 0,
uptime: 0
uptime: 0,
};
constructor(options: CacheOptions = {}) {
this.defaultTTL = options.ttl ?? 3600; // 1 hour default
this.keyPrefix = options.keyPrefix ?? 'cache:';
this.enableMetrics = options.enableMetrics ?? true;
// Get connection manager instance
this.connectionManager = RedisConnectionManager.getInstance();
// Generate connection name based on cache type
const baseName = options.name || this.keyPrefix.replace(':', '').replace(/[^a-zA-Z0-9]/g, '').toUpperCase() || 'CACHE';
const baseName =
options.name ||
this.keyPrefix
.replace(':', '')
.replace(/[^a-zA-Z0-9]/g, '')
.toUpperCase() ||
'CACHE';
// Get Redis connection (shared by default for cache)
this.redis = this.connectionManager.getConnection({
name: `${baseName}-SERVICE`,
@ -110,7 +116,7 @@ export class RedisCache implements CacheProvider {
return await operation();
} catch (error) {
this.logger.error(`Redis ${operationName} failed`, {
error: error instanceof Error ? error.message : String(error)
error: error instanceof Error ? error.message : String(error),
});
this.updateStats(false, true);
return fallback;
@ -122,7 +128,7 @@ export class RedisCache implements CacheProvider {
async () => {
const fullKey = this.getKey(key);
const value = await this.redis.get(fullKey);
if (value === null) {
this.updateStats(false);
this.logger.debug('Cache miss', { key });
@ -131,7 +137,7 @@ export class RedisCache implements CacheProvider {
this.updateStats(true);
this.logger.debug('Cache hit', { key });
try {
return JSON.parse(value) as T;
} catch {
@ -144,23 +150,29 @@ export class RedisCache implements CacheProvider {
);
}
async set<T>(key: string, value: T, options?: number | {
ttl?: number;
preserveTTL?: boolean;
onlyIfExists?: boolean;
onlyIfNotExists?: boolean;
getOldValue?: boolean;
}): Promise<T | null> {
async set<T>(
key: string,
value: T,
options?:
| number
| {
ttl?: number;
preserveTTL?: boolean;
onlyIfExists?: boolean;
onlyIfNotExists?: boolean;
getOldValue?: boolean;
}
): Promise<T | null> {
return this.safeExecute(
async () => {
const fullKey = this.getKey(key);
const serialized = typeof value === 'string' ? value : JSON.stringify(value);
// Handle backward compatibility - if options is a number, treat as TTL
const config = typeof options === 'number' ? { ttl: options } : (options || {});
const config = typeof options === 'number' ? { ttl: options } : options || {};
let oldValue: T | null = null;
// Get old value if requested
if (config.getOldValue) {
const existingValue = await this.redis.get(fullKey);
@ -172,15 +184,17 @@ export class RedisCache implements CacheProvider {
}
}
}
// Handle preserveTTL logic
if (config.preserveTTL) {
const currentTTL = await this.redis.ttl(fullKey);
if (currentTTL === -2) {
// Key doesn't exist
if (config.onlyIfExists) {
this.logger.debug('Set skipped - key does not exist and onlyIfExists is true', { key });
this.logger.debug('Set skipped - key does not exist and onlyIfExists is true', {
key,
});
return oldValue;
}
// Set with default or specified TTL
@ -201,7 +215,7 @@ export class RedisCache implements CacheProvider {
if (config.onlyIfExists && config.onlyIfNotExists) {
throw new Error('Cannot specify both onlyIfExists and onlyIfNotExists');
}
if (config.onlyIfExists) {
// Only set if key exists (XX flag)
const ttl = config.ttl ?? this.defaultTTL;
@ -223,10 +237,10 @@ export class RedisCache implements CacheProvider {
const ttl = config.ttl ?? this.defaultTTL;
await this.redis.setex(fullKey, ttl, serialized);
}
this.logger.debug('Cache set', { key, ttl: config.ttl ?? this.defaultTTL });
}
return oldValue;
},
null,
@ -278,8 +292,8 @@ export class RedisCache implements CacheProvider {
const pong = await this.redis.ping();
return pong === 'PONG';
} catch (error) {
this.logger.error('Redis health check failed', {
error: error instanceof Error ? error.message : String(error)
this.logger.error('Redis health check failed', {
error: error instanceof Error ? error.message : String(error),
});
return false;
}
@ -288,7 +302,7 @@ export class RedisCache implements CacheProvider {
getStats(): CacheStats {
return {
...this.stats,
uptime: Date.now() - this.startTime
uptime: Date.now() - this.startTime,
};
}
@ -308,7 +322,7 @@ export class RedisCache implements CacheProvider {
resolve();
});
this.redis.once('error', (error) => {
this.redis.once('error', error => {
clearTimeout(timeoutId);
reject(error);
});
@ -318,12 +332,12 @@ export class RedisCache implements CacheProvider {
isReady(): boolean {
// Always check the actual Redis connection status
const ready = this.redis.status === 'ready';
// Update local flag if we're not using shared connection
if (this.isConnected !== ready) {
this.isConnected = ready;
}
return ready;
}
@ -334,7 +348,7 @@ export class RedisCache implements CacheProvider {
async setIfExists<T>(key: string, value: T, ttl?: number): Promise<boolean> {
const result = await this.set(key, value, { ttl, onlyIfExists: true });
return result !== null || await this.exists(key);
return result !== null || (await this.exists(key));
}
async setIfNotExists<T>(key: string, value: T, ttl?: number): Promise<boolean> {
@ -347,11 +361,15 @@ export class RedisCache implements CacheProvider {
}
// Atomic update with transformation
async updateField<T>(key: string, updater: (current: T | null) => T, ttl?: number): Promise<T | null> {
async updateField<T>(
key: string,
updater: (current: T | null) => T,
ttl?: number
): Promise<T | null> {
return this.safeExecute(
async () => {
const fullKey = this.getKey(key);
// Use Lua script for atomic read-modify-write
const luaScript = `
local key = KEYS[1]
@ -363,13 +381,12 @@ export class RedisCache implements CacheProvider {
-- Return current value for processing
return {current_value, current_ttl}
`;
const [currentValue, currentTTL] = await this.redis.eval(
luaScript,
1,
fullKey
) as [string | null, number];
const [currentValue, currentTTL] = (await this.redis.eval(luaScript, 1, fullKey)) as [
string | null,
number,
];
// Parse current value
let parsed: T | null = null;
if (currentValue !== null) {
@ -379,10 +396,10 @@ export class RedisCache implements CacheProvider {
parsed = currentValue as unknown as T;
}
}
// Apply updater function
const newValue = updater(parsed);
// Set the new value with appropriate TTL logic
if (ttl !== undefined) {
// Use specified TTL
@ -394,7 +411,7 @@ export class RedisCache implements CacheProvider {
// Preserve existing TTL
await this.set(key, newValue, { preserveTTL: true });
}
return parsed;
},
null,

View file

@ -1,84 +1,90 @@
export interface CacheProvider {
get<T>(key: string): Promise<T | null>;
set<T>(key: string, value: T, options?: number | {
ttl?: number;
preserveTTL?: boolean;
onlyIfExists?: boolean;
onlyIfNotExists?: boolean;
getOldValue?: boolean;
}): Promise<T | null>;
del(key: string): Promise<void>;
exists(key: string): Promise<boolean>;
clear(): Promise<void>;
getStats(): CacheStats;
health(): Promise<boolean>;
/**
* Wait for the cache to be ready and connected
* @param timeout Maximum time to wait in milliseconds (default: 5000)
* @returns Promise that resolves when cache is ready
*/
waitForReady(timeout?: number): Promise<void>;
/**
* Check if the cache is currently ready
*/
isReady(): boolean;
// Enhanced cache methods
/**
* Update value preserving existing TTL
*/
update?<T>(key: string, value: T): Promise<T | null>;
/**
* Set value only if key exists
*/
setIfExists?<T>(key: string, value: T, ttl?: number): Promise<boolean>;
/**
* Set value only if key doesn't exist
*/
setIfNotExists?<T>(key: string, value: T, ttl?: number): Promise<boolean>;
/**
* Replace existing key's value and TTL
*/
replace?<T>(key: string, value: T, ttl?: number): Promise<T | null>;
/**
* Atomically update field with transformation function
*/
updateField?<T>(key: string, updater: (current: T | null) => T, ttl?: number): Promise<T | null>;
}
export interface CacheOptions {
ttl?: number;
keyPrefix?: string;
enableMetrics?: boolean;
name?: string; // Name for connection identification
shared?: boolean; // Whether to use shared connection
}
export interface CacheStats {
hits: number;
misses: number;
errors: number;
hitRate: number;
total: number;
uptime: number;
}
export interface CacheConfig {
type: 'redis';
keyPrefix?: string;
defaultTTL?: number;
enableMetrics?: boolean;
compression?: boolean;
}
export type CacheKey = string | (() => string);
export interface SerializationOptions {
compress?: boolean;
binary?: boolean;
}
export interface CacheProvider {
get<T>(key: string): Promise<T | null>;
set<T>(
key: string,
value: T,
options?:
| number
| {
ttl?: number;
preserveTTL?: boolean;
onlyIfExists?: boolean;
onlyIfNotExists?: boolean;
getOldValue?: boolean;
}
): Promise<T | null>;
del(key: string): Promise<void>;
exists(key: string): Promise<boolean>;
clear(): Promise<void>;
getStats(): CacheStats;
health(): Promise<boolean>;
/**
* Wait for the cache to be ready and connected
* @param timeout Maximum time to wait in milliseconds (default: 5000)
* @returns Promise that resolves when cache is ready
*/
waitForReady(timeout?: number): Promise<void>;
/**
* Check if the cache is currently ready
*/
isReady(): boolean;
// Enhanced cache methods
/**
* Update value preserving existing TTL
*/
update?<T>(key: string, value: T): Promise<T | null>;
/**
* Set value only if key exists
*/
setIfExists?<T>(key: string, value: T, ttl?: number): Promise<boolean>;
/**
* Set value only if key doesn't exist
*/
setIfNotExists?<T>(key: string, value: T, ttl?: number): Promise<boolean>;
/**
* Replace existing key's value and TTL
*/
replace?<T>(key: string, value: T, ttl?: number): Promise<T | null>;
/**
* Atomically update field with transformation function
*/
updateField?<T>(key: string, updater: (current: T | null) => T, ttl?: number): Promise<T | null>;
}
export interface CacheOptions {
ttl?: number;
keyPrefix?: string;
enableMetrics?: boolean;
name?: string; // Name for connection identification
shared?: boolean; // Whether to use shared connection
}
export interface CacheStats {
hits: number;
misses: number;
errors: number;
hitRate: number;
total: number;
uptime: number;
}
export interface CacheConfig {
type: 'redis';
keyPrefix?: string;
defaultTTL?: number;
enableMetrics?: boolean;
compression?: boolean;
}
export type CacheKey = string | (() => string);
export interface SerializationOptions {
compress?: boolean;
binary?: boolean;
}

View file

@ -1,111 +1,118 @@
/**
* Admin interfaces configuration using Yup
* PgAdmin, Mongo Express, Redis Insight for database management
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, strWithChoices } = envValidators;
/**
* PgAdmin configuration with validation and defaults
*/
export const pgAdminConfig = cleanEnv(process.env, {
// PgAdmin Server
PGADMIN_HOST: str('localhost', 'PgAdmin host'),
PGADMIN_PORT: port(8080, 'PgAdmin port'),
// Authentication
PGADMIN_DEFAULT_EMAIL: str('admin@tradingbot.local', 'PgAdmin default admin email'),
PGADMIN_DEFAULT_PASSWORD: str('admin123', 'PgAdmin default admin password'),
// Configuration
PGADMIN_SERVER_MODE: bool(false, 'Enable server mode (multi-user)'),
PGADMIN_DISABLE_POSTFIX: bool(true, 'Disable postfix for email'),
PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION: bool(true, 'Enhanced cookie protection'),
// Security
PGADMIN_MASTER_PASSWORD_REQUIRED: bool(false, 'Require master password'),
PGADMIN_SESSION_TIMEOUT: str('60', 'Session timeout in minutes'),
});
/**
* Mongo Express configuration with validation and defaults
*/
export const mongoExpressConfig = cleanEnv(process.env, {
// Mongo Express Server
MONGO_EXPRESS_HOST: str('localhost', 'Mongo Express host'),
MONGO_EXPRESS_PORT: port(8081, 'Mongo Express port'),
// MongoDB Connection
MONGO_EXPRESS_MONGODB_SERVER: str('mongodb', 'MongoDB server name/host'),
MONGO_EXPRESS_MONGODB_PORT: port(27017, 'MongoDB port'),
MONGO_EXPRESS_MONGODB_ADMINUSERNAME: str('trading_admin', 'MongoDB admin username'),
MONGO_EXPRESS_MONGODB_ADMINPASSWORD: str('', 'MongoDB admin password'),
// Basic Authentication for Mongo Express
MONGO_EXPRESS_BASICAUTH_USERNAME: str('admin', 'Basic auth username for Mongo Express'),
MONGO_EXPRESS_BASICAUTH_PASSWORD: str('admin123', 'Basic auth password for Mongo Express'),
// Configuration
MONGO_EXPRESS_ENABLE_ADMIN: bool(true, 'Enable admin features'),
MONGO_EXPRESS_OPTIONS_EDITOR_THEME: str('rubyblue', 'Editor theme (rubyblue, 3024-night, etc.)'),
MONGO_EXPRESS_REQUEST_SIZE: str('100kb', 'Maximum request size'),
});
/**
* Redis Insight configuration with validation and defaults
*/
export const redisInsightConfig = cleanEnv(process.env, {
// Redis Insight Server
REDIS_INSIGHT_HOST: str('localhost', 'Redis Insight host'),
REDIS_INSIGHT_PORT: port(8001, 'Redis Insight port'),
// Redis Connection Settings
REDIS_INSIGHT_REDIS_HOSTS: str('local:dragonfly:6379', 'Redis hosts in format name:host:port,name:host:port'),
// Configuration
REDIS_INSIGHT_LOG_LEVEL: strWithChoices(['error', 'warn', 'info', 'verbose', 'debug'], 'info', 'Redis Insight log level'),
REDIS_INSIGHT_DISABLE_ANALYTICS: bool(true, 'Disable analytics collection'),
REDIS_INSIGHT_BUILD_TYPE: str('DOCKER', 'Build type identifier'),
});
// Export typed configuration objects
export type PgAdminConfig = typeof pgAdminConfig;
export type MongoExpressConfig = typeof mongoExpressConfig;
export type RedisInsightConfig = typeof redisInsightConfig;
// Export individual config values for convenience
export const {
PGADMIN_HOST,
PGADMIN_PORT,
PGADMIN_DEFAULT_EMAIL,
PGADMIN_DEFAULT_PASSWORD,
PGADMIN_SERVER_MODE,
PGADMIN_DISABLE_POSTFIX,
PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION,
PGADMIN_MASTER_PASSWORD_REQUIRED,
PGADMIN_SESSION_TIMEOUT,
} = pgAdminConfig;
export const {
MONGO_EXPRESS_HOST,
MONGO_EXPRESS_PORT,
MONGO_EXPRESS_MONGODB_SERVER,
MONGO_EXPRESS_MONGODB_PORT,
MONGO_EXPRESS_MONGODB_ADMINUSERNAME,
MONGO_EXPRESS_MONGODB_ADMINPASSWORD,
MONGO_EXPRESS_BASICAUTH_USERNAME,
MONGO_EXPRESS_BASICAUTH_PASSWORD,
MONGO_EXPRESS_ENABLE_ADMIN,
MONGO_EXPRESS_OPTIONS_EDITOR_THEME,
MONGO_EXPRESS_REQUEST_SIZE,
} = mongoExpressConfig;
export const {
REDIS_INSIGHT_HOST,
REDIS_INSIGHT_PORT,
REDIS_INSIGHT_REDIS_HOSTS,
REDIS_INSIGHT_LOG_LEVEL,
REDIS_INSIGHT_DISABLE_ANALYTICS,
REDIS_INSIGHT_BUILD_TYPE,
} = redisInsightConfig;
/**
* Admin interfaces configuration using Yup
* PgAdmin, Mongo Express, Redis Insight for database management
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, strWithChoices } = envValidators;
/**
* PgAdmin configuration with validation and defaults
*/
export const pgAdminConfig = cleanEnv(process.env, {
// PgAdmin Server
PGADMIN_HOST: str('localhost', 'PgAdmin host'),
PGADMIN_PORT: port(8080, 'PgAdmin port'),
// Authentication
PGADMIN_DEFAULT_EMAIL: str('admin@tradingbot.local', 'PgAdmin default admin email'),
PGADMIN_DEFAULT_PASSWORD: str('admin123', 'PgAdmin default admin password'),
// Configuration
PGADMIN_SERVER_MODE: bool(false, 'Enable server mode (multi-user)'),
PGADMIN_DISABLE_POSTFIX: bool(true, 'Disable postfix for email'),
PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION: bool(true, 'Enhanced cookie protection'),
// Security
PGADMIN_MASTER_PASSWORD_REQUIRED: bool(false, 'Require master password'),
PGADMIN_SESSION_TIMEOUT: str('60', 'Session timeout in minutes'),
});
/**
* Mongo Express configuration with validation and defaults
*/
export const mongoExpressConfig = cleanEnv(process.env, {
// Mongo Express Server
MONGO_EXPRESS_HOST: str('localhost', 'Mongo Express host'),
MONGO_EXPRESS_PORT: port(8081, 'Mongo Express port'),
// MongoDB Connection
MONGO_EXPRESS_MONGODB_SERVER: str('mongodb', 'MongoDB server name/host'),
MONGO_EXPRESS_MONGODB_PORT: port(27017, 'MongoDB port'),
MONGO_EXPRESS_MONGODB_ADMINUSERNAME: str('trading_admin', 'MongoDB admin username'),
MONGO_EXPRESS_MONGODB_ADMINPASSWORD: str('', 'MongoDB admin password'),
// Basic Authentication for Mongo Express
MONGO_EXPRESS_BASICAUTH_USERNAME: str('admin', 'Basic auth username for Mongo Express'),
MONGO_EXPRESS_BASICAUTH_PASSWORD: str('admin123', 'Basic auth password for Mongo Express'),
// Configuration
MONGO_EXPRESS_ENABLE_ADMIN: bool(true, 'Enable admin features'),
MONGO_EXPRESS_OPTIONS_EDITOR_THEME: str('rubyblue', 'Editor theme (rubyblue, 3024-night, etc.)'),
MONGO_EXPRESS_REQUEST_SIZE: str('100kb', 'Maximum request size'),
});
/**
* Redis Insight configuration with validation and defaults
*/
export const redisInsightConfig = cleanEnv(process.env, {
// Redis Insight Server
REDIS_INSIGHT_HOST: str('localhost', 'Redis Insight host'),
REDIS_INSIGHT_PORT: port(8001, 'Redis Insight port'),
// Redis Connection Settings
REDIS_INSIGHT_REDIS_HOSTS: str(
'local:dragonfly:6379',
'Redis hosts in format name:host:port,name:host:port'
),
// Configuration
REDIS_INSIGHT_LOG_LEVEL: strWithChoices(
['error', 'warn', 'info', 'verbose', 'debug'],
'info',
'Redis Insight log level'
),
REDIS_INSIGHT_DISABLE_ANALYTICS: bool(true, 'Disable analytics collection'),
REDIS_INSIGHT_BUILD_TYPE: str('DOCKER', 'Build type identifier'),
});
// Export typed configuration objects
export type PgAdminConfig = typeof pgAdminConfig;
export type MongoExpressConfig = typeof mongoExpressConfig;
export type RedisInsightConfig = typeof redisInsightConfig;
// Export individual config values for convenience
export const {
PGADMIN_HOST,
PGADMIN_PORT,
PGADMIN_DEFAULT_EMAIL,
PGADMIN_DEFAULT_PASSWORD,
PGADMIN_SERVER_MODE,
PGADMIN_DISABLE_POSTFIX,
PGADMIN_CONFIG_ENHANCED_COOKIE_PROTECTION,
PGADMIN_MASTER_PASSWORD_REQUIRED,
PGADMIN_SESSION_TIMEOUT,
} = pgAdminConfig;
export const {
MONGO_EXPRESS_HOST,
MONGO_EXPRESS_PORT,
MONGO_EXPRESS_MONGODB_SERVER,
MONGO_EXPRESS_MONGODB_PORT,
MONGO_EXPRESS_MONGODB_ADMINUSERNAME,
MONGO_EXPRESS_MONGODB_ADMINPASSWORD,
MONGO_EXPRESS_BASICAUTH_USERNAME,
MONGO_EXPRESS_BASICAUTH_PASSWORD,
MONGO_EXPRESS_ENABLE_ADMIN,
MONGO_EXPRESS_OPTIONS_EDITOR_THEME,
MONGO_EXPRESS_REQUEST_SIZE,
} = mongoExpressConfig;
export const {
REDIS_INSIGHT_HOST,
REDIS_INSIGHT_PORT,
REDIS_INSIGHT_REDIS_HOSTS,
REDIS_INSIGHT_LOG_LEVEL,
REDIS_INSIGHT_DISABLE_ANALYTICS,
REDIS_INSIGHT_BUILD_TYPE,
} = redisInsightConfig;

View file

@ -1,68 +1,63 @@
/**
* Core configuration module for the Stock Bot platform using Yup
*/
import { config as dotenvConfig } from 'dotenv';
import path from 'node:path';
/**
* Represents an error related to configuration validation
*/
export class ConfigurationError extends Error {
constructor(message: string) {
super(message);
this.name = 'ConfigurationError';
}
}
/**
* Environment types
*/
export enum Environment {
Development = 'development',
Testing = 'testing',
Staging = 'staging',
Production = 'production'
}
/**
* Loads environment variables from .env files based on the current environment
*/
export function loadEnvVariables(envOverride?: string): void {
const env = envOverride || process.env.NODE_ENV || 'development';
console.log(`Current environment: ${env}`);
// Order of loading:
// 1. .env (base environment variables)
// 2. .env.{environment} (environment-specific variables)
// 3. .env.local (local overrides, not to be committed)
const envFiles = [
'.env',
`.env.${env}`,
'.env.local'
];
for (const file of envFiles) {
dotenvConfig({ path: path.resolve(process.cwd(), file) });
}
}
/**
* Gets the current environment from process.env.NODE_ENV
*/
export function getEnvironment(): Environment {
const env = process.env.NODE_ENV?.toLowerCase() || 'development';
switch (env) {
case 'development':
return Environment.Development;
case 'testing':
case 'test': // Handle both 'test' and 'testing' for compatibility
return Environment.Testing;
case 'staging':
return Environment.Staging;
case 'production':
return Environment.Production;
default:
return Environment.Development;
}
}
/**
* Core configuration module for the Stock Bot platform using Yup
*/
import path from 'node:path';
import { config as dotenvConfig } from 'dotenv';
/**
* Represents an error related to configuration validation
*/
export class ConfigurationError extends Error {
constructor(message: string) {
super(message);
this.name = 'ConfigurationError';
}
}
/**
* Environment types
*/
export enum Environment {
Development = 'development',
Testing = 'testing',
Staging = 'staging',
Production = 'production',
}
/**
* Loads environment variables from .env files based on the current environment
*/
export function loadEnvVariables(envOverride?: string): void {
const env = envOverride || process.env.NODE_ENV || 'development';
console.log(`Current environment: ${env}`);
// Order of loading:
// 1. .env (base environment variables)
// 2. .env.{environment} (environment-specific variables)
// 3. .env.local (local overrides, not to be committed)
const envFiles = ['.env', `.env.${env}`, '.env.local'];
for (const file of envFiles) {
dotenvConfig({ path: path.resolve(process.cwd(), file) });
}
}
/**
* Gets the current environment from process.env.NODE_ENV
*/
export function getEnvironment(): Environment {
const env = process.env.NODE_ENV?.toLowerCase() || 'development';
switch (env) {
case 'development':
return Environment.Development;
case 'testing':
case 'test': // Handle both 'test' and 'testing' for compatibility
return Environment.Testing;
case 'staging':
return Environment.Staging;
case 'production':
return Environment.Production;
default:
return Environment.Development;
}
}

View file

@ -1,184 +1,185 @@
/**
* Data provider configurations using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, num, bool, strWithChoices } = envValidators;
export interface ProviderConfig {
name: string;
type: 'rest' | 'websocket';
enabled: boolean;
baseUrl?: string;
apiKey?: string;
apiSecret?: string;
rateLimits?: {
maxRequestsPerMinute?: number;
maxRequestsPerSecond?: number;
maxRequestsPerHour?: number;
};
}
/**
* Data providers configuration with validation and defaults
*/
export const dataProvidersConfig = cleanEnv(process.env, {
// Default Provider
DEFAULT_DATA_PROVIDER: strWithChoices(['alpaca', 'polygon', 'yahoo', 'iex'], 'alpaca', 'Default data provider'),
// Alpaca Configuration
ALPACA_API_KEY: str('', 'Alpaca API key'),
ALPACA_API_SECRET: str('', 'Alpaca API secret'),
ALPACA_BASE_URL: str('https://data.alpaca.markets/v1beta1', 'Alpaca base URL'),
ALPACA_RATE_LIMIT: num(200, 'Alpaca rate limit per minute'),
ALPACA_ENABLED: bool(true, 'Enable Alpaca provider'),
// Polygon Configuration
POLYGON_API_KEY: str('', 'Polygon API key'),
POLYGON_BASE_URL: str('https://api.polygon.io', 'Polygon base URL'),
POLYGON_RATE_LIMIT: num(5, 'Polygon rate limit per minute'),
POLYGON_ENABLED: bool(false, 'Enable Polygon provider'),
// Yahoo Finance Configuration
YAHOO_BASE_URL: str('https://query1.finance.yahoo.com', 'Yahoo Finance base URL'),
YAHOO_RATE_LIMIT: num(2000, 'Yahoo Finance rate limit per hour'),
YAHOO_ENABLED: bool(true, 'Enable Yahoo Finance provider'),
// IEX Cloud Configuration
IEX_API_KEY: str('', 'IEX Cloud API key'),
IEX_BASE_URL: str('https://cloud.iexapis.com/stable', 'IEX Cloud base URL'),
IEX_RATE_LIMIT: num(100, 'IEX Cloud rate limit per second'),
IEX_ENABLED: bool(false, 'Enable IEX Cloud provider'),
// Connection Settings
DATA_PROVIDER_TIMEOUT: num(30000, 'Request timeout in milliseconds'),
DATA_PROVIDER_RETRIES: num(3, 'Number of retry attempts'),
DATA_PROVIDER_RETRY_DELAY: num(1000, 'Retry delay in milliseconds'),
// Cache Settings
DATA_CACHE_ENABLED: bool(true, 'Enable data caching'),
DATA_CACHE_TTL: num(300000, 'Cache TTL in milliseconds'),
DATA_CACHE_MAX_SIZE: num(1000, 'Maximum cache entries'),
});
/**
* Helper function to get provider-specific configuration
*/
export function getProviderConfig(providerName: string) {
// make a interface for the provider config
const name = providerName.toUpperCase();
switch (name) {
case 'ALPACA':
return {
name: 'alpaca',
type: 'rest' as const,
enabled: dataProvidersConfig.ALPACA_ENABLED,
baseUrl: dataProvidersConfig.ALPACA_BASE_URL,
apiKey: dataProvidersConfig.ALPACA_API_KEY,
apiSecret: dataProvidersConfig.ALPACA_API_SECRET,
rateLimits: {
maxRequestsPerMinute: dataProvidersConfig.ALPACA_RATE_LIMIT
}
};
case 'POLYGON':
return {
name: 'polygon',
type: 'rest' as const,
enabled: dataProvidersConfig.POLYGON_ENABLED,
baseUrl: dataProvidersConfig.POLYGON_BASE_URL,
apiKey: dataProvidersConfig.POLYGON_API_KEY,
rateLimits: {
maxRequestsPerMinute: dataProvidersConfig.POLYGON_RATE_LIMIT
}
};
case 'YAHOO':
return {
name: 'yahoo',
type: 'rest' as const,
enabled: dataProvidersConfig.YAHOO_ENABLED,
baseUrl: dataProvidersConfig.YAHOO_BASE_URL,
rateLimits: {
maxRequestsPerHour: dataProvidersConfig.YAHOO_RATE_LIMIT
}
};
case 'IEX':
return {
name: 'iex',
type: 'rest' as const,
enabled: dataProvidersConfig.IEX_ENABLED,
baseUrl: dataProvidersConfig.IEX_BASE_URL,
apiKey: dataProvidersConfig.IEX_API_KEY,
rateLimits: {
maxRequestsPerSecond: dataProvidersConfig.IEX_RATE_LIMIT
}
};
default:
throw new Error(`Unknown provider: ${providerName}`);
}
}
/**
* Get all enabled providers
*/
export function getEnabledProviders() {
const providers = ['alpaca', 'polygon', 'yahoo', 'iex'];
return providers
.map(provider => getProviderConfig(provider))
.filter(config => config.enabled);
}
/**
* Get the default provider configuration
*/
export function getDefaultProvider() {
return getProviderConfig(dataProvidersConfig.DEFAULT_DATA_PROVIDER);
}
// Export typed configuration object
export type DataProvidersConfig = typeof dataProvidersConfig;
export class DataProviders {
static getProviderConfig(providerName: string): ProviderConfig {
return getProviderConfig(providerName);
}
static getEnabledProviders(): ProviderConfig[] {
return getEnabledProviders();
}
static getDefaultProvider(): ProviderConfig {
return getDefaultProvider();
}
}
// Export individual config values for convenience
export const {
DEFAULT_DATA_PROVIDER,
ALPACA_API_KEY,
ALPACA_API_SECRET,
ALPACA_BASE_URL,
ALPACA_RATE_LIMIT,
ALPACA_ENABLED,
POLYGON_API_KEY,
POLYGON_BASE_URL,
POLYGON_RATE_LIMIT,
POLYGON_ENABLED,
YAHOO_BASE_URL,
YAHOO_RATE_LIMIT,
YAHOO_ENABLED,
IEX_API_KEY,
IEX_BASE_URL,
IEX_RATE_LIMIT,
IEX_ENABLED,
DATA_PROVIDER_TIMEOUT,
DATA_PROVIDER_RETRIES,
DATA_PROVIDER_RETRY_DELAY,
DATA_CACHE_ENABLED,
DATA_CACHE_TTL,
DATA_CACHE_MAX_SIZE,
} = dataProvidersConfig;
/**
* Data provider configurations using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, num, bool, strWithChoices } = envValidators;
export interface ProviderConfig {
name: string;
type: 'rest' | 'websocket';
enabled: boolean;
baseUrl?: string;
apiKey?: string;
apiSecret?: string;
rateLimits?: {
maxRequestsPerMinute?: number;
maxRequestsPerSecond?: number;
maxRequestsPerHour?: number;
};
}
/**
* Data providers configuration with validation and defaults
*/
export const dataProvidersConfig = cleanEnv(process.env, {
// Default Provider
DEFAULT_DATA_PROVIDER: strWithChoices(
['alpaca', 'polygon', 'yahoo', 'iex'],
'alpaca',
'Default data provider'
),
// Alpaca Configuration
ALPACA_API_KEY: str('', 'Alpaca API key'),
ALPACA_API_SECRET: str('', 'Alpaca API secret'),
ALPACA_BASE_URL: str('https://data.alpaca.markets/v1beta1', 'Alpaca base URL'),
ALPACA_RATE_LIMIT: num(200, 'Alpaca rate limit per minute'),
ALPACA_ENABLED: bool(true, 'Enable Alpaca provider'),
// Polygon Configuration
POLYGON_API_KEY: str('', 'Polygon API key'),
POLYGON_BASE_URL: str('https://api.polygon.io', 'Polygon base URL'),
POLYGON_RATE_LIMIT: num(5, 'Polygon rate limit per minute'),
POLYGON_ENABLED: bool(false, 'Enable Polygon provider'),
// Yahoo Finance Configuration
YAHOO_BASE_URL: str('https://query1.finance.yahoo.com', 'Yahoo Finance base URL'),
YAHOO_RATE_LIMIT: num(2000, 'Yahoo Finance rate limit per hour'),
YAHOO_ENABLED: bool(true, 'Enable Yahoo Finance provider'),
// IEX Cloud Configuration
IEX_API_KEY: str('', 'IEX Cloud API key'),
IEX_BASE_URL: str('https://cloud.iexapis.com/stable', 'IEX Cloud base URL'),
IEX_RATE_LIMIT: num(100, 'IEX Cloud rate limit per second'),
IEX_ENABLED: bool(false, 'Enable IEX Cloud provider'),
// Connection Settings
DATA_PROVIDER_TIMEOUT: num(30000, 'Request timeout in milliseconds'),
DATA_PROVIDER_RETRIES: num(3, 'Number of retry attempts'),
DATA_PROVIDER_RETRY_DELAY: num(1000, 'Retry delay in milliseconds'),
// Cache Settings
DATA_CACHE_ENABLED: bool(true, 'Enable data caching'),
DATA_CACHE_TTL: num(300000, 'Cache TTL in milliseconds'),
DATA_CACHE_MAX_SIZE: num(1000, 'Maximum cache entries'),
});
/**
* Helper function to get provider-specific configuration
*/
export function getProviderConfig(providerName: string) {
// make a interface for the provider config
const name = providerName.toUpperCase();
switch (name) {
case 'ALPACA':
return {
name: 'alpaca',
type: 'rest' as const,
enabled: dataProvidersConfig.ALPACA_ENABLED,
baseUrl: dataProvidersConfig.ALPACA_BASE_URL,
apiKey: dataProvidersConfig.ALPACA_API_KEY,
apiSecret: dataProvidersConfig.ALPACA_API_SECRET,
rateLimits: {
maxRequestsPerMinute: dataProvidersConfig.ALPACA_RATE_LIMIT,
},
};
case 'POLYGON':
return {
name: 'polygon',
type: 'rest' as const,
enabled: dataProvidersConfig.POLYGON_ENABLED,
baseUrl: dataProvidersConfig.POLYGON_BASE_URL,
apiKey: dataProvidersConfig.POLYGON_API_KEY,
rateLimits: {
maxRequestsPerMinute: dataProvidersConfig.POLYGON_RATE_LIMIT,
},
};
case 'YAHOO':
return {
name: 'yahoo',
type: 'rest' as const,
enabled: dataProvidersConfig.YAHOO_ENABLED,
baseUrl: dataProvidersConfig.YAHOO_BASE_URL,
rateLimits: {
maxRequestsPerHour: dataProvidersConfig.YAHOO_RATE_LIMIT,
},
};
case 'IEX':
return {
name: 'iex',
type: 'rest' as const,
enabled: dataProvidersConfig.IEX_ENABLED,
baseUrl: dataProvidersConfig.IEX_BASE_URL,
apiKey: dataProvidersConfig.IEX_API_KEY,
rateLimits: {
maxRequestsPerSecond: dataProvidersConfig.IEX_RATE_LIMIT,
},
};
default:
throw new Error(`Unknown provider: ${providerName}`);
}
}
/**
* Get all enabled providers
*/
export function getEnabledProviders() {
const providers = ['alpaca', 'polygon', 'yahoo', 'iex'];
return providers.map(provider => getProviderConfig(provider)).filter(config => config.enabled);
}
/**
* Get the default provider configuration
*/
export function getDefaultProvider() {
return getProviderConfig(dataProvidersConfig.DEFAULT_DATA_PROVIDER);
}
// Export typed configuration object
export type DataProvidersConfig = typeof dataProvidersConfig;
export class DataProviders {
static getProviderConfig(providerName: string): ProviderConfig {
return getProviderConfig(providerName);
}
static getEnabledProviders(): ProviderConfig[] {
return getEnabledProviders();
}
static getDefaultProvider(): ProviderConfig {
return getDefaultProvider();
}
}
// Export individual config values for convenience
export const {
DEFAULT_DATA_PROVIDER,
ALPACA_API_KEY,
ALPACA_API_SECRET,
ALPACA_BASE_URL,
ALPACA_RATE_LIMIT,
ALPACA_ENABLED,
POLYGON_API_KEY,
POLYGON_BASE_URL,
POLYGON_RATE_LIMIT,
POLYGON_ENABLED,
YAHOO_BASE_URL,
YAHOO_RATE_LIMIT,
YAHOO_ENABLED,
IEX_API_KEY,
IEX_BASE_URL,
IEX_RATE_LIMIT,
IEX_ENABLED,
DATA_PROVIDER_TIMEOUT,
DATA_PROVIDER_RETRIES,
DATA_PROVIDER_RETRY_DELAY,
DATA_CACHE_ENABLED,
DATA_CACHE_TTL,
DATA_CACHE_MAX_SIZE,
} = dataProvidersConfig;

View file

@ -1,56 +1,56 @@
/**
* Database configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, num, bool } = envValidators;
/**
* Database configuration with validation and defaults
*/
export const databaseConfig = cleanEnv(process.env, {
// PostgreSQL Configuration
DB_HOST: str('localhost', 'Database host'),
DB_PORT: port(5432, 'Database port'),
DB_NAME: str('stockbot', 'Database name'),
DB_USER: str('stockbot', 'Database user'),
DB_PASSWORD: str('', 'Database password'),
// Connection Pool Settings
DB_POOL_MIN: num(2, 'Minimum pool connections'),
DB_POOL_MAX: num(10, 'Maximum pool connections'),
DB_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'),
// SSL Configuration
DB_SSL: bool(false, 'Enable SSL for database connection'),
DB_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'),
// Additional Settings
DB_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'),
DB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
DB_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'),
DB_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'),
DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'),
});
// Export typed configuration object
export type DatabaseConfig = typeof databaseConfig;
// Export individual config values for convenience
export const {
DB_HOST,
DB_PORT,
DB_NAME,
DB_USER,
DB_PASSWORD,
DB_POOL_MIN,
DB_POOL_MAX,
DB_POOL_IDLE_TIMEOUT,
DB_SSL,
DB_SSL_REJECT_UNAUTHORIZED,
DB_QUERY_TIMEOUT,
DB_CONNECTION_TIMEOUT,
DB_STATEMENT_TIMEOUT,
DB_LOCK_TIMEOUT,
DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
} = databaseConfig;
/**
* Database configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, num, bool } = envValidators;
/**
* Database configuration with validation and defaults
*/
export const databaseConfig = cleanEnv(process.env, {
// PostgreSQL Configuration
DB_HOST: str('localhost', 'Database host'),
DB_PORT: port(5432, 'Database port'),
DB_NAME: str('stockbot', 'Database name'),
DB_USER: str('stockbot', 'Database user'),
DB_PASSWORD: str('', 'Database password'),
// Connection Pool Settings
DB_POOL_MIN: num(2, 'Minimum pool connections'),
DB_POOL_MAX: num(10, 'Maximum pool connections'),
DB_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'),
// SSL Configuration
DB_SSL: bool(false, 'Enable SSL for database connection'),
DB_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'),
// Additional Settings
DB_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'),
DB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
DB_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'),
DB_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'),
DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'),
});
// Export typed configuration object
export type DatabaseConfig = typeof databaseConfig;
// Export individual config values for convenience
export const {
DB_HOST,
DB_PORT,
DB_NAME,
DB_USER,
DB_PASSWORD,
DB_POOL_MIN,
DB_POOL_MAX,
DB_POOL_IDLE_TIMEOUT,
DB_SSL,
DB_SSL_REJECT_UNAUTHORIZED,
DB_QUERY_TIMEOUT,
DB_CONNECTION_TIMEOUT,
DB_STATEMENT_TIMEOUT,
DB_LOCK_TIMEOUT,
DB_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
} = databaseConfig;

View file

@ -1,81 +1,81 @@
/**
* Dragonfly (Redis replacement) configuration using Yup
* High-performance caching and event streaming
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, num, bool } = envValidators;
/**
* Dragonfly configuration with validation and defaults
*/
export const dragonflyConfig = cleanEnv(process.env, {
// Dragonfly Connection
DRAGONFLY_HOST: str('localhost', 'Dragonfly host'),
DRAGONFLY_PORT: port(6379, 'Dragonfly port'),
DRAGONFLY_PASSWORD: str('', 'Dragonfly password (if auth enabled)'),
DRAGONFLY_USERNAME: str('', 'Dragonfly username (if ACL enabled)'),
// Database Selection
DRAGONFLY_DATABASE: num(0, 'Dragonfly database number (0-15)'),
// Connection Pool Settings
DRAGONFLY_MAX_RETRIES: num(3, 'Maximum retry attempts'),
DRAGONFLY_RETRY_DELAY: num(50, 'Retry delay in ms'),
DRAGONFLY_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'),
DRAGONFLY_COMMAND_TIMEOUT: num(5000, 'Command timeout in ms'),
// Pool Configuration
DRAGONFLY_POOL_SIZE: num(10, 'Connection pool size'),
DRAGONFLY_POOL_MIN: num(1, 'Minimum pool connections'),
DRAGONFLY_POOL_MAX: num(20, 'Maximum pool connections'),
// TLS Settings
DRAGONFLY_TLS: bool(false, 'Enable TLS for Dragonfly connection'),
DRAGONFLY_TLS_CERT_FILE: str('', 'Path to TLS certificate file'),
DRAGONFLY_TLS_KEY_FILE: str('', 'Path to TLS key file'),
DRAGONFLY_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'),
DRAGONFLY_TLS_SKIP_VERIFY: bool(false, 'Skip TLS certificate verification'),
// Performance Settings
DRAGONFLY_ENABLE_KEEPALIVE: bool(true, 'Enable TCP keepalive'),
DRAGONFLY_KEEPALIVE_INTERVAL: num(60, 'Keepalive interval in seconds'),
// Clustering (if using cluster mode)
DRAGONFLY_CLUSTER_MODE: bool(false, 'Enable cluster mode'),
DRAGONFLY_CLUSTER_NODES: str('', 'Comma-separated list of cluster nodes (host:port)'),
// Memory and Cache Settings
DRAGONFLY_MAX_MEMORY: str('2gb', 'Maximum memory usage'),
DRAGONFLY_CACHE_MODE: bool(true, 'Enable cache mode'),
});
// Export typed configuration object
export type DragonflyConfig = typeof dragonflyConfig;
// Export individual config values for convenience
export const {
DRAGONFLY_HOST,
DRAGONFLY_PORT,
DRAGONFLY_PASSWORD,
DRAGONFLY_USERNAME,
DRAGONFLY_DATABASE,
DRAGONFLY_MAX_RETRIES,
DRAGONFLY_RETRY_DELAY,
DRAGONFLY_CONNECT_TIMEOUT,
DRAGONFLY_COMMAND_TIMEOUT,
DRAGONFLY_POOL_SIZE,
DRAGONFLY_POOL_MIN,
DRAGONFLY_POOL_MAX,
DRAGONFLY_TLS,
DRAGONFLY_TLS_CERT_FILE,
DRAGONFLY_TLS_KEY_FILE,
DRAGONFLY_TLS_CA_FILE,
DRAGONFLY_TLS_SKIP_VERIFY,
DRAGONFLY_ENABLE_KEEPALIVE,
DRAGONFLY_KEEPALIVE_INTERVAL,
DRAGONFLY_CLUSTER_MODE,
DRAGONFLY_CLUSTER_NODES,
DRAGONFLY_MAX_MEMORY,
DRAGONFLY_CACHE_MODE,
} = dragonflyConfig;
/**
* Dragonfly (Redis replacement) configuration using Yup
* High-performance caching and event streaming
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, num, bool } = envValidators;
/**
* Dragonfly configuration with validation and defaults
*/
export const dragonflyConfig = cleanEnv(process.env, {
// Dragonfly Connection
DRAGONFLY_HOST: str('localhost', 'Dragonfly host'),
DRAGONFLY_PORT: port(6379, 'Dragonfly port'),
DRAGONFLY_PASSWORD: str('', 'Dragonfly password (if auth enabled)'),
DRAGONFLY_USERNAME: str('', 'Dragonfly username (if ACL enabled)'),
// Database Selection
DRAGONFLY_DATABASE: num(0, 'Dragonfly database number (0-15)'),
// Connection Pool Settings
DRAGONFLY_MAX_RETRIES: num(3, 'Maximum retry attempts'),
DRAGONFLY_RETRY_DELAY: num(50, 'Retry delay in ms'),
DRAGONFLY_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'),
DRAGONFLY_COMMAND_TIMEOUT: num(5000, 'Command timeout in ms'),
// Pool Configuration
DRAGONFLY_POOL_SIZE: num(10, 'Connection pool size'),
DRAGONFLY_POOL_MIN: num(1, 'Minimum pool connections'),
DRAGONFLY_POOL_MAX: num(20, 'Maximum pool connections'),
// TLS Settings
DRAGONFLY_TLS: bool(false, 'Enable TLS for Dragonfly connection'),
DRAGONFLY_TLS_CERT_FILE: str('', 'Path to TLS certificate file'),
DRAGONFLY_TLS_KEY_FILE: str('', 'Path to TLS key file'),
DRAGONFLY_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'),
DRAGONFLY_TLS_SKIP_VERIFY: bool(false, 'Skip TLS certificate verification'),
// Performance Settings
DRAGONFLY_ENABLE_KEEPALIVE: bool(true, 'Enable TCP keepalive'),
DRAGONFLY_KEEPALIVE_INTERVAL: num(60, 'Keepalive interval in seconds'),
// Clustering (if using cluster mode)
DRAGONFLY_CLUSTER_MODE: bool(false, 'Enable cluster mode'),
DRAGONFLY_CLUSTER_NODES: str('', 'Comma-separated list of cluster nodes (host:port)'),
// Memory and Cache Settings
DRAGONFLY_MAX_MEMORY: str('2gb', 'Maximum memory usage'),
DRAGONFLY_CACHE_MODE: bool(true, 'Enable cache mode'),
});
// Export typed configuration object
export type DragonflyConfig = typeof dragonflyConfig;
// Export individual config values for convenience
export const {
DRAGONFLY_HOST,
DRAGONFLY_PORT,
DRAGONFLY_PASSWORD,
DRAGONFLY_USERNAME,
DRAGONFLY_DATABASE,
DRAGONFLY_MAX_RETRIES,
DRAGONFLY_RETRY_DELAY,
DRAGONFLY_CONNECT_TIMEOUT,
DRAGONFLY_COMMAND_TIMEOUT,
DRAGONFLY_POOL_SIZE,
DRAGONFLY_POOL_MIN,
DRAGONFLY_POOL_MAX,
DRAGONFLY_TLS,
DRAGONFLY_TLS_CERT_FILE,
DRAGONFLY_TLS_KEY_FILE,
DRAGONFLY_TLS_CA_FILE,
DRAGONFLY_TLS_SKIP_VERIFY,
DRAGONFLY_ENABLE_KEEPALIVE,
DRAGONFLY_KEEPALIVE_INTERVAL,
DRAGONFLY_CLUSTER_MODE,
DRAGONFLY_CLUSTER_NODES,
DRAGONFLY_MAX_MEMORY,
DRAGONFLY_CACHE_MODE,
} = dragonflyConfig;

View file

@ -1,162 +1,165 @@
/**
* Environment validation utilities using Yup
*/
import * as yup from 'yup';
import { config } from 'dotenv';
import { join } from 'path';
import { existsSync } from 'fs';
// Function to find and load environment variables
function loadEnvFiles() {
const cwd = process.cwd();
const possiblePaths = [
// Current working directory
join(cwd, '.env'),
join(cwd, '.env.local'),
// Root of the workspace (common pattern)
join(cwd, '../../.env'),
join(cwd, '../../../.env'),
// Config library directory
join(__dirname, '../.env'),
join(__dirname, '../../.env'),
join(__dirname, '../../../.env'),
];
// Try to load each possible .env file
for (const envPath of possiblePaths) {
if (existsSync(envPath)) {
console.log(`📄 Loading environment from: ${envPath}`);
config({ path: envPath });
break; // Use the first .env file found
}
}
// Also try to load environment-specific files
const environment = process.env.NODE_ENV || 'development';
const envSpecificPaths = [
join(cwd, `.env.${environment}`),
join(cwd, `.env.${environment}.local`),
];
for (const envPath of envSpecificPaths) {
if (existsSync(envPath)) {
console.log(`📄 Loading ${environment} environment from: ${envPath}`);
config({ path: envPath, override: false }); // Don't override existing vars
}
}
}
// Load environment variables
loadEnvFiles();
/**
* Creates a Yup schema for environment variable validation
*/
export function createEnvSchema(shape: Record<string, any>) {
return yup.object(shape);
}
/**
* Validates environment variables against a Yup schema
*/
export function validateEnv(
schema: yup.ObjectSchema<any>,
env = process.env
): any {
try {
const result = schema.validateSync(env, { abortEarly: false });
return result;
} catch (error) {
if (error instanceof yup.ValidationError) {
console.error('❌ Invalid environment variables:');
error.inner.forEach((err) => {
console.error(` ${err.path}: ${err.message}`);
});
}
throw new Error('Environment validation failed');
}
}
/**
* Manually load environment variables from a specific path
*/
export function loadEnv(path?: string) {
if (path) {
console.log(`📄 Manually loading environment from: ${path}`);
config({ path });
} else {
loadEnvFiles();
}
}
/**
* Helper functions for common validation patterns
*/
export const envValidators = {
// String with default
str: (defaultValue?: string, description?: string) =>
yup.string().default(defaultValue || ''),
// String with choices (enum)
strWithChoices: (choices: string[], defaultValue?: string, description?: string) =>
yup.string().oneOf(choices).default(defaultValue || choices[0]),
// Required string
requiredStr: (description?: string) =>
yup.string().required('Required'),
// Port number
port: (defaultValue?: number, description?: string) =>
yup.number()
.integer()
.min(1)
.max(65535)
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return parseInt(originalVal, 10);
}
return val;
})
.default(defaultValue || 3000),
// Number with default
num: (defaultValue?: number, description?: string) =>
yup.number()
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return parseFloat(originalVal);
}
return val;
})
.default(defaultValue || 0),
// Boolean with default
bool: (defaultValue?: boolean, description?: string) =>
yup.boolean()
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return originalVal === 'true' || originalVal === '1';
}
return val;
})
.default(defaultValue || false),
// URL validation
url: (defaultValue?: string, description?: string) =>
yup.string().url().default(defaultValue || 'http://localhost'),
// Email validation
email: (description?: string) =>
yup.string().email(),
};
/**
* Legacy compatibility - creates a cleanEnv-like function
*/
export function cleanEnv(
env: Record<string, string | undefined>,
validators: Record<string, any>
): any {
const schema = createEnvSchema(validators);
return validateEnv(schema, env);
}
/**
* Environment validation utilities using Yup
*/
import { existsSync } from 'fs';
import { join } from 'path';
import { config } from 'dotenv';
import * as yup from 'yup';
// Function to find and load environment variables
function loadEnvFiles() {
const cwd = process.cwd();
const possiblePaths = [
// Current working directory
join(cwd, '.env'),
join(cwd, '.env.local'),
// Root of the workspace (common pattern)
join(cwd, '../../.env'),
join(cwd, '../../../.env'),
// Config library directory
join(__dirname, '../.env'),
join(__dirname, '../../.env'),
join(__dirname, '../../../.env'),
];
// Try to load each possible .env file
for (const envPath of possiblePaths) {
if (existsSync(envPath)) {
console.log(`📄 Loading environment from: ${envPath}`);
config({ path: envPath });
break; // Use the first .env file found
}
}
// Also try to load environment-specific files
const environment = process.env.NODE_ENV || 'development';
const envSpecificPaths = [
join(cwd, `.env.${environment}`),
join(cwd, `.env.${environment}.local`),
];
for (const envPath of envSpecificPaths) {
if (existsSync(envPath)) {
console.log(`📄 Loading ${environment} environment from: ${envPath}`);
config({ path: envPath, override: false }); // Don't override existing vars
}
}
}
// Load environment variables
loadEnvFiles();
/**
* Creates a Yup schema for environment variable validation
*/
export function createEnvSchema(shape: Record<string, any>) {
return yup.object(shape);
}
/**
* Validates environment variables against a Yup schema
*/
export function validateEnv(schema: yup.ObjectSchema<any>, env = process.env): any {
try {
const result = schema.validateSync(env, { abortEarly: false });
return result;
} catch (error) {
if (error instanceof yup.ValidationError) {
console.error('❌ Invalid environment variables:');
error.inner.forEach(err => {
console.error(` ${err.path}: ${err.message}`);
});
}
throw new Error('Environment validation failed');
}
}
/**
* Manually load environment variables from a specific path
*/
export function loadEnv(path?: string) {
if (path) {
console.log(`📄 Manually loading environment from: ${path}`);
config({ path });
} else {
loadEnvFiles();
}
}
/**
* Helper functions for common validation patterns
*/
export const envValidators = {
// String with default
str: (defaultValue?: string, description?: string) => yup.string().default(defaultValue || ''),
// String with choices (enum)
strWithChoices: (choices: string[], defaultValue?: string, description?: string) =>
yup
.string()
.oneOf(choices)
.default(defaultValue || choices[0]),
// Required string
requiredStr: (description?: string) => yup.string().required('Required'),
// Port number
port: (defaultValue?: number, description?: string) =>
yup
.number()
.integer()
.min(1)
.max(65535)
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return parseInt(originalVal, 10);
}
return val;
})
.default(defaultValue || 3000),
// Number with default
num: (defaultValue?: number, description?: string) =>
yup
.number()
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return parseFloat(originalVal);
}
return val;
})
.default(defaultValue || 0),
// Boolean with default
bool: (defaultValue?: boolean, description?: string) =>
yup
.boolean()
.transform((val, originalVal) => {
if (typeof originalVal === 'string') {
return originalVal === 'true' || originalVal === '1';
}
return val;
})
.default(defaultValue || false),
// URL validation
url: (defaultValue?: string, description?: string) =>
yup
.string()
.url()
.default(defaultValue || 'http://localhost'),
// Email validation
email: (description?: string) => yup.string().email(),
};
/**
* Legacy compatibility - creates a cleanEnv-like function
*/
export function cleanEnv(
env: Record<string, string | undefined>,
validators: Record<string, any>
): any {
const schema = createEnvSchema(validators);
return validateEnv(schema, env);
}

View file

@ -1,20 +1,20 @@
/**
* @stock-bot/config
*
* Configuration management library for Stock Bot platform using Yup
*/
// Re-export everything from all modules
export * from './env-utils';
export * from './core';
export * from './admin-interfaces';
export * from './database';
export * from './dragonfly';
export * from './postgres';
export * from './questdb';
export * from './mongodb';
export * from './logging';
export * from './loki';
export * from './monitoring';
export * from './data-providers';
export * from './risk';
/**
* @stock-bot/config
*
* Configuration management library for Stock Bot platform using Yup
*/
// Re-export everything from all modules
export * from './env-utils';
export * from './core';
export * from './admin-interfaces';
export * from './database';
export * from './dragonfly';
export * from './postgres';
export * from './questdb';
export * from './mongodb';
export * from './logging';
export * from './loki';
export * from './monitoring';
export * from './data-providers';
export * from './risk';

View file

@ -1,74 +1,74 @@
/**
* Logging configuration using Yup
* Application logging settings without Loki (Loki config is in monitoring.ts)
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, bool, num, strWithChoices } = envValidators;
/**
* Logging configuration with validation and defaults
*/
export const loggingConfig = cleanEnv(process.env, {
// Basic Logging Settings
LOG_LEVEL: strWithChoices(['debug', 'info', 'warn', 'error'], 'info', 'Logging level'),
LOG_FORMAT: strWithChoices(['json', 'simple', 'combined'], 'json', 'Log output format'),
LOG_CONSOLE: bool(true, 'Enable console logging'),
LOG_FILE: bool(false, 'Enable file logging'),
// File Logging Settings
LOG_FILE_PATH: str('logs', 'Log file directory path'),
LOG_FILE_MAX_SIZE: str('20m', 'Maximum log file size'),
LOG_FILE_MAX_FILES: num(14, 'Maximum number of log files to keep'),
LOG_FILE_DATE_PATTERN: str('YYYY-MM-DD', 'Log file date pattern'),
// Error Logging
LOG_ERROR_FILE: bool(true, 'Enable separate error log file'),
LOG_ERROR_STACK: bool(true, 'Include stack traces in error logs'),
// Performance Logging
LOG_PERFORMANCE: bool(false, 'Enable performance logging'),
LOG_SQL_QUERIES: bool(false, 'Log SQL queries'),
LOG_HTTP_REQUESTS: bool(true, 'Log HTTP requests'),
// Structured Logging
LOG_STRUCTURED: bool(true, 'Use structured logging format'),
LOG_TIMESTAMP: bool(true, 'Include timestamps in logs'),
LOG_CALLER_INFO: bool(false, 'Include caller information in logs'),
// Log Filtering
LOG_SILENT_MODULES: str('', 'Comma-separated list of modules to silence'),
LOG_VERBOSE_MODULES: str('', 'Comma-separated list of modules for verbose logging'),
// Application Context
LOG_SERVICE_NAME: str('stock-bot', 'Service name for log context'),
LOG_SERVICE_VERSION: str('1.0.0', 'Service version for log context'),
LOG_ENVIRONMENT: str('development', 'Environment for log context'),
});
// Export typed configuration object
export type LoggingConfig = typeof loggingConfig;
// Export individual config values for convenience
export const {
LOG_LEVEL,
LOG_FORMAT,
LOG_CONSOLE,
LOG_FILE,
LOG_FILE_PATH,
LOG_FILE_MAX_SIZE,
LOG_FILE_MAX_FILES,
LOG_FILE_DATE_PATTERN,
LOG_ERROR_FILE,
LOG_ERROR_STACK,
LOG_PERFORMANCE,
LOG_SQL_QUERIES,
LOG_HTTP_REQUESTS,
LOG_STRUCTURED,
LOG_TIMESTAMP,
LOG_CALLER_INFO,
LOG_SILENT_MODULES,
LOG_VERBOSE_MODULES,
LOG_SERVICE_NAME,
LOG_SERVICE_VERSION,
LOG_ENVIRONMENT,
} = loggingConfig;
/**
* Logging configuration using Yup
* Application logging settings without Loki (Loki config is in monitoring.ts)
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, bool, num, strWithChoices } = envValidators;
/**
* Logging configuration with validation and defaults
*/
export const loggingConfig = cleanEnv(process.env, {
// Basic Logging Settings
LOG_LEVEL: strWithChoices(['debug', 'info', 'warn', 'error'], 'info', 'Logging level'),
LOG_FORMAT: strWithChoices(['json', 'simple', 'combined'], 'json', 'Log output format'),
LOG_CONSOLE: bool(true, 'Enable console logging'),
LOG_FILE: bool(false, 'Enable file logging'),
// File Logging Settings
LOG_FILE_PATH: str('logs', 'Log file directory path'),
LOG_FILE_MAX_SIZE: str('20m', 'Maximum log file size'),
LOG_FILE_MAX_FILES: num(14, 'Maximum number of log files to keep'),
LOG_FILE_DATE_PATTERN: str('YYYY-MM-DD', 'Log file date pattern'),
// Error Logging
LOG_ERROR_FILE: bool(true, 'Enable separate error log file'),
LOG_ERROR_STACK: bool(true, 'Include stack traces in error logs'),
// Performance Logging
LOG_PERFORMANCE: bool(false, 'Enable performance logging'),
LOG_SQL_QUERIES: bool(false, 'Log SQL queries'),
LOG_HTTP_REQUESTS: bool(true, 'Log HTTP requests'),
// Structured Logging
LOG_STRUCTURED: bool(true, 'Use structured logging format'),
LOG_TIMESTAMP: bool(true, 'Include timestamps in logs'),
LOG_CALLER_INFO: bool(false, 'Include caller information in logs'),
// Log Filtering
LOG_SILENT_MODULES: str('', 'Comma-separated list of modules to silence'),
LOG_VERBOSE_MODULES: str('', 'Comma-separated list of modules for verbose logging'),
// Application Context
LOG_SERVICE_NAME: str('stock-bot', 'Service name for log context'),
LOG_SERVICE_VERSION: str('1.0.0', 'Service version for log context'),
LOG_ENVIRONMENT: str('development', 'Environment for log context'),
});
// Export typed configuration object
export type LoggingConfig = typeof loggingConfig;
// Export individual config values for convenience
export const {
LOG_LEVEL,
LOG_FORMAT,
LOG_CONSOLE,
LOG_FILE,
LOG_FILE_PATH,
LOG_FILE_MAX_SIZE,
LOG_FILE_MAX_FILES,
LOG_FILE_DATE_PATTERN,
LOG_ERROR_FILE,
LOG_ERROR_STACK,
LOG_PERFORMANCE,
LOG_SQL_QUERIES,
LOG_HTTP_REQUESTS,
LOG_STRUCTURED,
LOG_TIMESTAMP,
LOG_CALLER_INFO,
LOG_SILENT_MODULES,
LOG_VERBOSE_MODULES,
LOG_SERVICE_NAME,
LOG_SERVICE_VERSION,
LOG_ENVIRONMENT,
} = loggingConfig;

View file

@ -1,63 +1,63 @@
/**
* Loki log aggregation configuration using Yup
* Centralized logging configuration for the Stock Bot platform
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* Loki configuration with validation and defaults
*/
export const lokiConfig = cleanEnv(process.env, {
// Loki Server
LOKI_HOST: str('localhost', 'Loki host'),
LOKI_PORT: port(3100, 'Loki port'),
LOKI_URL: str('', 'Complete Loki URL (overrides host/port)'),
// Authentication
LOKI_USERNAME: str('', 'Loki username (if auth enabled)'),
LOKI_PASSWORD: str('', 'Loki password (if auth enabled)'),
LOKI_TENANT_ID: str('', 'Loki tenant ID (for multi-tenancy)'),
// Push Configuration
LOKI_PUSH_TIMEOUT: num(10000, 'Push timeout in ms'),
LOKI_BATCH_SIZE: num(1024, 'Batch size for log entries'),
LOKI_BATCH_WAIT: num(5, 'Batch wait time in ms'),
// Retention Settings
LOKI_RETENTION_PERIOD: str('30d', 'Log retention period'),
LOKI_MAX_CHUNK_AGE: str('1h', 'Maximum chunk age'),
// TLS Settings
LOKI_TLS_ENABLED: bool(false, 'Enable TLS for Loki'),
LOKI_TLS_INSECURE: bool(false, 'Skip TLS verification'),
// Log Labels
LOKI_DEFAULT_LABELS: str('', 'Default labels for all log entries (JSON format)'),
LOKI_SERVICE_LABEL: str('stock-bot', 'Service label for log entries'),
LOKI_ENVIRONMENT_LABEL: str('development', 'Environment label for log entries'),
});
// Export typed configuration object
export type LokiConfig = typeof lokiConfig;
// Export individual config values for convenience
export const {
LOKI_HOST,
LOKI_PORT,
LOKI_URL,
LOKI_USERNAME,
LOKI_PASSWORD,
LOKI_TENANT_ID,
LOKI_PUSH_TIMEOUT,
LOKI_BATCH_SIZE,
LOKI_BATCH_WAIT,
LOKI_RETENTION_PERIOD,
LOKI_MAX_CHUNK_AGE,
LOKI_TLS_ENABLED,
LOKI_TLS_INSECURE,
LOKI_DEFAULT_LABELS,
LOKI_SERVICE_LABEL,
LOKI_ENVIRONMENT_LABEL,
} = lokiConfig;
/**
* Loki log aggregation configuration using Yup
* Centralized logging configuration for the Stock Bot platform
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* Loki configuration with validation and defaults
*/
export const lokiConfig = cleanEnv(process.env, {
// Loki Server
LOKI_HOST: str('localhost', 'Loki host'),
LOKI_PORT: port(3100, 'Loki port'),
LOKI_URL: str('', 'Complete Loki URL (overrides host/port)'),
// Authentication
LOKI_USERNAME: str('', 'Loki username (if auth enabled)'),
LOKI_PASSWORD: str('', 'Loki password (if auth enabled)'),
LOKI_TENANT_ID: str('', 'Loki tenant ID (for multi-tenancy)'),
// Push Configuration
LOKI_PUSH_TIMEOUT: num(10000, 'Push timeout in ms'),
LOKI_BATCH_SIZE: num(1024, 'Batch size for log entries'),
LOKI_BATCH_WAIT: num(5, 'Batch wait time in ms'),
// Retention Settings
LOKI_RETENTION_PERIOD: str('30d', 'Log retention period'),
LOKI_MAX_CHUNK_AGE: str('1h', 'Maximum chunk age'),
// TLS Settings
LOKI_TLS_ENABLED: bool(false, 'Enable TLS for Loki'),
LOKI_TLS_INSECURE: bool(false, 'Skip TLS verification'),
// Log Labels
LOKI_DEFAULT_LABELS: str('', 'Default labels for all log entries (JSON format)'),
LOKI_SERVICE_LABEL: str('stock-bot', 'Service label for log entries'),
LOKI_ENVIRONMENT_LABEL: str('development', 'Environment label for log entries'),
});
// Export typed configuration object
export type LokiConfig = typeof lokiConfig;
// Export individual config values for convenience
export const {
LOKI_HOST,
LOKI_PORT,
LOKI_URL,
LOKI_USERNAME,
LOKI_PASSWORD,
LOKI_TENANT_ID,
LOKI_PUSH_TIMEOUT,
LOKI_BATCH_SIZE,
LOKI_BATCH_WAIT,
LOKI_RETENTION_PERIOD,
LOKI_MAX_CHUNK_AGE,
LOKI_TLS_ENABLED,
LOKI_TLS_INSECURE,
LOKI_DEFAULT_LABELS,
LOKI_SERVICE_LABEL,
LOKI_ENVIRONMENT_LABEL,
} = lokiConfig;

View file

@ -1,73 +1,77 @@
/**
* MongoDB configuration using Yup
* Document storage for sentiment data, raw documents, and unstructured data
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num, strWithChoices } = envValidators;
/**
* MongoDB configuration with validation and defaults
*/
export const mongodbConfig = cleanEnv(process.env, {
// MongoDB Connection
MONGODB_HOST: str('localhost', 'MongoDB host'),
MONGODB_PORT: port(27017, 'MongoDB port'),
MONGODB_DATABASE: str('trading_documents', 'MongoDB database name'),
// Authentication
MONGODB_USERNAME: str('trading_admin', 'MongoDB username'),
MONGODB_PASSWORD: str('', 'MongoDB password'),
MONGODB_AUTH_SOURCE: str('admin', 'MongoDB authentication database'),
// Connection URI (alternative to individual settings)
MONGODB_URI: str('', 'Complete MongoDB connection URI (overrides individual settings)'),
// Connection Pool Settings
MONGODB_MAX_POOL_SIZE: num(10, 'Maximum connection pool size'),
MONGODB_MIN_POOL_SIZE: num(0, 'Minimum connection pool size'),
MONGODB_MAX_IDLE_TIME: num(30000, 'Maximum idle time for connections in ms'),
// Timeouts
MONGODB_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'),
MONGODB_SOCKET_TIMEOUT: num(30000, 'Socket timeout in ms'),
MONGODB_SERVER_SELECTION_TIMEOUT: num(5000, 'Server selection timeout in ms'),
// SSL/TLS Settings
MONGODB_TLS: bool(false, 'Enable TLS for MongoDB connection'),
MONGODB_TLS_INSECURE: bool(false, 'Allow invalid certificates in TLS mode'),
MONGODB_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'),
// Additional Settings
MONGODB_RETRY_WRITES: bool(true, 'Enable retryable writes'),
MONGODB_JOURNAL: bool(true, 'Enable write concern journal'),
MONGODB_READ_PREFERENCE: strWithChoices(['primary', 'primaryPreferred', 'secondary', 'secondaryPreferred', 'nearest'], 'primary', 'MongoDB read preference'),
MONGODB_WRITE_CONCERN: str('majority', 'Write concern level'),
});
// Export typed configuration object
export type MongoDbConfig = typeof mongodbConfig;
// Export individual config values for convenience
export const {
MONGODB_HOST,
MONGODB_PORT,
MONGODB_DATABASE,
MONGODB_USERNAME,
MONGODB_PASSWORD,
MONGODB_AUTH_SOURCE,
MONGODB_URI,
MONGODB_MAX_POOL_SIZE,
MONGODB_MIN_POOL_SIZE,
MONGODB_MAX_IDLE_TIME,
MONGODB_CONNECT_TIMEOUT,
MONGODB_SOCKET_TIMEOUT,
MONGODB_SERVER_SELECTION_TIMEOUT,
MONGODB_TLS,
MONGODB_TLS_INSECURE,
MONGODB_TLS_CA_FILE,
MONGODB_RETRY_WRITES,
MONGODB_JOURNAL,
MONGODB_READ_PREFERENCE,
MONGODB_WRITE_CONCERN,
} = mongodbConfig;
/**
* MongoDB configuration using Yup
* Document storage for sentiment data, raw documents, and unstructured data
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num, strWithChoices } = envValidators;
/**
* MongoDB configuration with validation and defaults
*/
export const mongodbConfig = cleanEnv(process.env, {
// MongoDB Connection
MONGODB_HOST: str('localhost', 'MongoDB host'),
MONGODB_PORT: port(27017, 'MongoDB port'),
MONGODB_DATABASE: str('trading_documents', 'MongoDB database name'),
// Authentication
MONGODB_USERNAME: str('trading_admin', 'MongoDB username'),
MONGODB_PASSWORD: str('', 'MongoDB password'),
MONGODB_AUTH_SOURCE: str('admin', 'MongoDB authentication database'),
// Connection URI (alternative to individual settings)
MONGODB_URI: str('', 'Complete MongoDB connection URI (overrides individual settings)'),
// Connection Pool Settings
MONGODB_MAX_POOL_SIZE: num(10, 'Maximum connection pool size'),
MONGODB_MIN_POOL_SIZE: num(0, 'Minimum connection pool size'),
MONGODB_MAX_IDLE_TIME: num(30000, 'Maximum idle time for connections in ms'),
// Timeouts
MONGODB_CONNECT_TIMEOUT: num(10000, 'Connection timeout in ms'),
MONGODB_SOCKET_TIMEOUT: num(30000, 'Socket timeout in ms'),
MONGODB_SERVER_SELECTION_TIMEOUT: num(5000, 'Server selection timeout in ms'),
// SSL/TLS Settings
MONGODB_TLS: bool(false, 'Enable TLS for MongoDB connection'),
MONGODB_TLS_INSECURE: bool(false, 'Allow invalid certificates in TLS mode'),
MONGODB_TLS_CA_FILE: str('', 'Path to TLS CA certificate file'),
// Additional Settings
MONGODB_RETRY_WRITES: bool(true, 'Enable retryable writes'),
MONGODB_JOURNAL: bool(true, 'Enable write concern journal'),
MONGODB_READ_PREFERENCE: strWithChoices(
['primary', 'primaryPreferred', 'secondary', 'secondaryPreferred', 'nearest'],
'primary',
'MongoDB read preference'
),
MONGODB_WRITE_CONCERN: str('majority', 'Write concern level'),
});
// Export typed configuration object
export type MongoDbConfig = typeof mongodbConfig;
// Export individual config values for convenience
export const {
MONGODB_HOST,
MONGODB_PORT,
MONGODB_DATABASE,
MONGODB_USERNAME,
MONGODB_PASSWORD,
MONGODB_AUTH_SOURCE,
MONGODB_URI,
MONGODB_MAX_POOL_SIZE,
MONGODB_MIN_POOL_SIZE,
MONGODB_MAX_IDLE_TIME,
MONGODB_CONNECT_TIMEOUT,
MONGODB_SOCKET_TIMEOUT,
MONGODB_SERVER_SELECTION_TIMEOUT,
MONGODB_TLS,
MONGODB_TLS_INSECURE,
MONGODB_TLS_CA_FILE,
MONGODB_RETRY_WRITES,
MONGODB_JOURNAL,
MONGODB_READ_PREFERENCE,
MONGODB_WRITE_CONCERN,
} = mongodbConfig;

View file

@ -1,88 +1,92 @@
/**
* Monitoring configuration using Yup
* Prometheus metrics, Grafana visualization, and Loki logging
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num, strWithChoices } = envValidators;
/**
* Prometheus configuration with validation and defaults
*/
export const prometheusConfig = cleanEnv(process.env, {
// Prometheus Server
PROMETHEUS_HOST: str('localhost', 'Prometheus host'),
PROMETHEUS_PORT: port(9090, 'Prometheus port'),
PROMETHEUS_URL: str('', 'Complete Prometheus URL (overrides host/port)'),
// Authentication
PROMETHEUS_USERNAME: str('', 'Prometheus username (if auth enabled)'),
PROMETHEUS_PASSWORD: str('', 'Prometheus password (if auth enabled)'),
// Metrics Collection
PROMETHEUS_SCRAPE_INTERVAL: str('15s', 'Default scrape interval'),
PROMETHEUS_EVALUATION_INTERVAL: str('15s', 'Rule evaluation interval'),
PROMETHEUS_RETENTION_TIME: str('15d', 'Data retention time'),
// TLS Settings
PROMETHEUS_TLS_ENABLED: bool(false, 'Enable TLS for Prometheus'),
PROMETHEUS_TLS_INSECURE: bool(false, 'Skip TLS verification'),
});
/**
* Grafana configuration with validation and defaults
*/
export const grafanaConfig = cleanEnv(process.env, {
// Grafana Server
GRAFANA_HOST: str('localhost', 'Grafana host'),
GRAFANA_PORT: port(3000, 'Grafana port'),
GRAFANA_URL: str('', 'Complete Grafana URL (overrides host/port)'),
// Authentication
GRAFANA_ADMIN_USER: str('admin', 'Grafana admin username'),
GRAFANA_ADMIN_PASSWORD: str('admin', 'Grafana admin password'),
// Security Settings
GRAFANA_ALLOW_SIGN_UP: bool(false, 'Allow user sign up'),
GRAFANA_SECRET_KEY: str('', 'Grafana secret key for encryption'),
// Database Settings
GRAFANA_DATABASE_TYPE: strWithChoices(['mysql', 'postgres', 'sqlite3'], 'sqlite3', 'Grafana database type'),
GRAFANA_DATABASE_URL: str('', 'Grafana database URL'),
// Feature Flags
GRAFANA_DISABLE_GRAVATAR: bool(true, 'Disable Gravatar avatars'),
GRAFANA_ENABLE_GZIP: bool(true, 'Enable gzip compression'),
});
// Export typed configuration objects
export type PrometheusConfig = typeof prometheusConfig;
export type GrafanaConfig = typeof grafanaConfig;
// Export individual config values for convenience
export const {
PROMETHEUS_HOST,
PROMETHEUS_PORT,
PROMETHEUS_URL,
PROMETHEUS_USERNAME,
PROMETHEUS_PASSWORD,
PROMETHEUS_SCRAPE_INTERVAL,
PROMETHEUS_EVALUATION_INTERVAL,
PROMETHEUS_RETENTION_TIME,
PROMETHEUS_TLS_ENABLED,
PROMETHEUS_TLS_INSECURE,
} = prometheusConfig;
export const {
GRAFANA_HOST,
GRAFANA_PORT,
GRAFANA_URL,
GRAFANA_ADMIN_USER,
GRAFANA_ADMIN_PASSWORD,
GRAFANA_ALLOW_SIGN_UP,
GRAFANA_SECRET_KEY,
GRAFANA_DATABASE_TYPE,
GRAFANA_DATABASE_URL,
GRAFANA_DISABLE_GRAVATAR,
GRAFANA_ENABLE_GZIP,
} = grafanaConfig;
/**
* Monitoring configuration using Yup
* Prometheus metrics, Grafana visualization, and Loki logging
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num, strWithChoices } = envValidators;
/**
* Prometheus configuration with validation and defaults
*/
export const prometheusConfig = cleanEnv(process.env, {
// Prometheus Server
PROMETHEUS_HOST: str('localhost', 'Prometheus host'),
PROMETHEUS_PORT: port(9090, 'Prometheus port'),
PROMETHEUS_URL: str('', 'Complete Prometheus URL (overrides host/port)'),
// Authentication
PROMETHEUS_USERNAME: str('', 'Prometheus username (if auth enabled)'),
PROMETHEUS_PASSWORD: str('', 'Prometheus password (if auth enabled)'),
// Metrics Collection
PROMETHEUS_SCRAPE_INTERVAL: str('15s', 'Default scrape interval'),
PROMETHEUS_EVALUATION_INTERVAL: str('15s', 'Rule evaluation interval'),
PROMETHEUS_RETENTION_TIME: str('15d', 'Data retention time'),
// TLS Settings
PROMETHEUS_TLS_ENABLED: bool(false, 'Enable TLS for Prometheus'),
PROMETHEUS_TLS_INSECURE: bool(false, 'Skip TLS verification'),
});
/**
* Grafana configuration with validation and defaults
*/
export const grafanaConfig = cleanEnv(process.env, {
// Grafana Server
GRAFANA_HOST: str('localhost', 'Grafana host'),
GRAFANA_PORT: port(3000, 'Grafana port'),
GRAFANA_URL: str('', 'Complete Grafana URL (overrides host/port)'),
// Authentication
GRAFANA_ADMIN_USER: str('admin', 'Grafana admin username'),
GRAFANA_ADMIN_PASSWORD: str('admin', 'Grafana admin password'),
// Security Settings
GRAFANA_ALLOW_SIGN_UP: bool(false, 'Allow user sign up'),
GRAFANA_SECRET_KEY: str('', 'Grafana secret key for encryption'),
// Database Settings
GRAFANA_DATABASE_TYPE: strWithChoices(
['mysql', 'postgres', 'sqlite3'],
'sqlite3',
'Grafana database type'
),
GRAFANA_DATABASE_URL: str('', 'Grafana database URL'),
// Feature Flags
GRAFANA_DISABLE_GRAVATAR: bool(true, 'Disable Gravatar avatars'),
GRAFANA_ENABLE_GZIP: bool(true, 'Enable gzip compression'),
});
// Export typed configuration objects
export type PrometheusConfig = typeof prometheusConfig;
export type GrafanaConfig = typeof grafanaConfig;
// Export individual config values for convenience
export const {
PROMETHEUS_HOST,
PROMETHEUS_PORT,
PROMETHEUS_URL,
PROMETHEUS_USERNAME,
PROMETHEUS_PASSWORD,
PROMETHEUS_SCRAPE_INTERVAL,
PROMETHEUS_EVALUATION_INTERVAL,
PROMETHEUS_RETENTION_TIME,
PROMETHEUS_TLS_ENABLED,
PROMETHEUS_TLS_INSECURE,
} = prometheusConfig;
export const {
GRAFANA_HOST,
GRAFANA_PORT,
GRAFANA_URL,
GRAFANA_ADMIN_USER,
GRAFANA_ADMIN_PASSWORD,
GRAFANA_ALLOW_SIGN_UP,
GRAFANA_SECRET_KEY,
GRAFANA_DATABASE_TYPE,
GRAFANA_DATABASE_URL,
GRAFANA_DISABLE_GRAVATAR,
GRAFANA_ENABLE_GZIP,
} = grafanaConfig;

View file

@ -1,56 +1,56 @@
/**
* PostgreSQL configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* PostgreSQL configuration with validation and defaults
*/
export const postgresConfig = cleanEnv(process.env, {
// PostgreSQL Connection Settings
POSTGRES_HOST: str('localhost', 'PostgreSQL host'),
POSTGRES_PORT: port(5432, 'PostgreSQL port'),
POSTGRES_DATABASE: str('stockbot', 'PostgreSQL database name'),
POSTGRES_USERNAME: str('stockbot', 'PostgreSQL username'),
POSTGRES_PASSWORD: str('', 'PostgreSQL password'),
// Connection Pool Settings
POSTGRES_POOL_MIN: num(2, 'Minimum pool connections'),
POSTGRES_POOL_MAX: num(10, 'Maximum pool connections'),
POSTGRES_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'),
// SSL Configuration
POSTGRES_SSL: bool(false, 'Enable SSL for PostgreSQL connection'),
POSTGRES_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'),
// Additional Settings
POSTGRES_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'),
POSTGRES_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
POSTGRES_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'),
POSTGRES_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'),
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'),
});
// Export typed configuration object
export type PostgresConfig = typeof postgresConfig;
// Export individual config values for convenience
export const {
POSTGRES_HOST,
POSTGRES_PORT,
POSTGRES_DATABASE,
POSTGRES_USERNAME,
POSTGRES_PASSWORD,
POSTGRES_POOL_MIN,
POSTGRES_POOL_MAX,
POSTGRES_POOL_IDLE_TIMEOUT,
POSTGRES_SSL,
POSTGRES_SSL_REJECT_UNAUTHORIZED,
POSTGRES_QUERY_TIMEOUT,
POSTGRES_CONNECTION_TIMEOUT,
POSTGRES_STATEMENT_TIMEOUT,
POSTGRES_LOCK_TIMEOUT,
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
} = postgresConfig;
/**
* PostgreSQL configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* PostgreSQL configuration with validation and defaults
*/
export const postgresConfig = cleanEnv(process.env, {
// PostgreSQL Connection Settings
POSTGRES_HOST: str('localhost', 'PostgreSQL host'),
POSTGRES_PORT: port(5432, 'PostgreSQL port'),
POSTGRES_DATABASE: str('stockbot', 'PostgreSQL database name'),
POSTGRES_USERNAME: str('stockbot', 'PostgreSQL username'),
POSTGRES_PASSWORD: str('', 'PostgreSQL password'),
// Connection Pool Settings
POSTGRES_POOL_MIN: num(2, 'Minimum pool connections'),
POSTGRES_POOL_MAX: num(10, 'Maximum pool connections'),
POSTGRES_POOL_IDLE_TIMEOUT: num(30000, 'Pool idle timeout in ms'),
// SSL Configuration
POSTGRES_SSL: bool(false, 'Enable SSL for PostgreSQL connection'),
POSTGRES_SSL_REJECT_UNAUTHORIZED: bool(true, 'Reject unauthorized SSL certificates'),
// Additional Settings
POSTGRES_QUERY_TIMEOUT: num(30000, 'Query timeout in ms'),
POSTGRES_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
POSTGRES_STATEMENT_TIMEOUT: num(30000, 'Statement timeout in ms'),
POSTGRES_LOCK_TIMEOUT: num(10000, 'Lock timeout in ms'),
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT: num(60000, 'Idle in transaction timeout in ms'),
});
// Export typed configuration object
export type PostgresConfig = typeof postgresConfig;
// Export individual config values for convenience
export const {
POSTGRES_HOST,
POSTGRES_PORT,
POSTGRES_DATABASE,
POSTGRES_USERNAME,
POSTGRES_PASSWORD,
POSTGRES_POOL_MIN,
POSTGRES_POOL_MAX,
POSTGRES_POOL_IDLE_TIMEOUT,
POSTGRES_SSL,
POSTGRES_SSL_REJECT_UNAUTHORIZED,
POSTGRES_QUERY_TIMEOUT,
POSTGRES_CONNECTION_TIMEOUT,
POSTGRES_STATEMENT_TIMEOUT,
POSTGRES_LOCK_TIMEOUT,
POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
} = postgresConfig;

View file

@ -1,55 +1,55 @@
/**
* QuestDB configuration using Yup
* Time-series database for OHLCV data, indicators, and performance metrics
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* QuestDB configuration with validation and defaults
*/
export const questdbConfig = cleanEnv(process.env, {
// QuestDB Connection
QUESTDB_HOST: str('localhost', 'QuestDB host'),
QUESTDB_HTTP_PORT: port(9000, 'QuestDB HTTP port (web console)'),
QUESTDB_PG_PORT: port(8812, 'QuestDB PostgreSQL wire protocol port'),
QUESTDB_INFLUX_PORT: port(9009, 'QuestDB InfluxDB line protocol port'),
// Authentication (if enabled)
QUESTDB_USER: str('', 'QuestDB username (if auth enabled)'),
QUESTDB_PASSWORD: str('', 'QuestDB password (if auth enabled)'),
// Connection Settings
QUESTDB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
QUESTDB_REQUEST_TIMEOUT: num(30000, 'Request timeout in ms'),
QUESTDB_RETRY_ATTEMPTS: num(3, 'Number of retry attempts'),
// TLS Settings
QUESTDB_TLS_ENABLED: bool(false, 'Enable TLS for QuestDB connection'),
QUESTDB_TLS_VERIFY_SERVER_CERT: bool(true, 'Verify server certificate'),
// Database Settings
QUESTDB_DEFAULT_DATABASE: str('qdb', 'Default database name'),
QUESTDB_TELEMETRY_ENABLED: bool(false, 'Enable telemetry'),
});
// Export typed configuration object
export type QuestDbConfig = typeof questdbConfig;
// Export individual config values for convenience
export const {
QUESTDB_HOST,
QUESTDB_HTTP_PORT,
QUESTDB_PG_PORT,
QUESTDB_INFLUX_PORT,
QUESTDB_USER,
QUESTDB_PASSWORD,
QUESTDB_CONNECTION_TIMEOUT,
QUESTDB_REQUEST_TIMEOUT,
QUESTDB_RETRY_ATTEMPTS,
QUESTDB_TLS_ENABLED,
QUESTDB_TLS_VERIFY_SERVER_CERT,
QUESTDB_DEFAULT_DATABASE,
QUESTDB_TELEMETRY_ENABLED,
} = questdbConfig;
/**
* QuestDB configuration using Yup
* Time-series database for OHLCV data, indicators, and performance metrics
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, port, bool, num } = envValidators;
/**
* QuestDB configuration with validation and defaults
*/
export const questdbConfig = cleanEnv(process.env, {
// QuestDB Connection
QUESTDB_HOST: str('localhost', 'QuestDB host'),
QUESTDB_HTTP_PORT: port(9000, 'QuestDB HTTP port (web console)'),
QUESTDB_PG_PORT: port(8812, 'QuestDB PostgreSQL wire protocol port'),
QUESTDB_INFLUX_PORT: port(9009, 'QuestDB InfluxDB line protocol port'),
// Authentication (if enabled)
QUESTDB_USER: str('', 'QuestDB username (if auth enabled)'),
QUESTDB_PASSWORD: str('', 'QuestDB password (if auth enabled)'),
// Connection Settings
QUESTDB_CONNECTION_TIMEOUT: num(5000, 'Connection timeout in ms'),
QUESTDB_REQUEST_TIMEOUT: num(30000, 'Request timeout in ms'),
QUESTDB_RETRY_ATTEMPTS: num(3, 'Number of retry attempts'),
// TLS Settings
QUESTDB_TLS_ENABLED: bool(false, 'Enable TLS for QuestDB connection'),
QUESTDB_TLS_VERIFY_SERVER_CERT: bool(true, 'Verify server certificate'),
// Database Settings
QUESTDB_DEFAULT_DATABASE: str('qdb', 'Default database name'),
QUESTDB_TELEMETRY_ENABLED: bool(false, 'Enable telemetry'),
});
// Export typed configuration object
export type QuestDbConfig = typeof questdbConfig;
// Export individual config values for convenience
export const {
QUESTDB_HOST,
QUESTDB_HTTP_PORT,
QUESTDB_PG_PORT,
QUESTDB_INFLUX_PORT,
QUESTDB_USER,
QUESTDB_PASSWORD,
QUESTDB_CONNECTION_TIMEOUT,
QUESTDB_REQUEST_TIMEOUT,
QUESTDB_RETRY_ATTEMPTS,
QUESTDB_TLS_ENABLED,
QUESTDB_TLS_VERIFY_SERVER_CERT,
QUESTDB_DEFAULT_DATABASE,
QUESTDB_TELEMETRY_ENABLED,
} = questdbConfig;

View file

@ -1,80 +1,80 @@
/**
* Risk management configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, num, bool, strWithChoices } = envValidators;
/**
* Risk configuration with validation and defaults
*/
export const riskConfig = cleanEnv(process.env, {
// Position Sizing
RISK_MAX_POSITION_SIZE: num(0.1, 'Maximum position size as percentage of portfolio'),
RISK_MAX_PORTFOLIO_EXPOSURE: num(0.8, 'Maximum portfolio exposure percentage'),
RISK_MAX_SINGLE_ASSET_EXPOSURE: num(0.2, 'Maximum exposure to single asset'),
RISK_MAX_SECTOR_EXPOSURE: num(0.3, 'Maximum exposure to single sector'),
// Stop Loss and Take Profit
RISK_DEFAULT_STOP_LOSS: num(0.05, 'Default stop loss percentage'),
RISK_DEFAULT_TAKE_PROFIT: num(0.15, 'Default take profit percentage'),
RISK_TRAILING_STOP_ENABLED: bool(true, 'Enable trailing stop losses'),
RISK_TRAILING_STOP_DISTANCE: num(0.03, 'Trailing stop distance percentage'),
// Risk Limits
RISK_MAX_DAILY_LOSS: num(0.05, 'Maximum daily loss percentage'),
RISK_MAX_WEEKLY_LOSS: num(0.1, 'Maximum weekly loss percentage'),
RISK_MAX_MONTHLY_LOSS: num(0.2, 'Maximum monthly loss percentage'),
// Volatility Controls
RISK_MAX_VOLATILITY_THRESHOLD: num(0.4, 'Maximum volatility threshold'),
RISK_VOLATILITY_LOOKBACK_DAYS: num(20, 'Volatility calculation lookback period'),
// Correlation Controls
RISK_MAX_CORRELATION_THRESHOLD: num(0.7, 'Maximum correlation between positions'),
RISK_CORRELATION_LOOKBACK_DAYS: num(60, 'Correlation calculation lookback period'),
// Leverage Controls
RISK_MAX_LEVERAGE: num(2.0, 'Maximum leverage allowed'),
RISK_MARGIN_CALL_THRESHOLD: num(0.3, 'Margin call threshold'),
// Circuit Breakers
RISK_CIRCUIT_BREAKER_ENABLED: bool(true, 'Enable circuit breakers'),
RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD: num(0.1, 'Circuit breaker loss threshold'),
RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES: num(60, 'Circuit breaker cooldown period'),
// Risk Model
RISK_MODEL_TYPE: strWithChoices(['var', 'cvar', 'expected_shortfall'], 'var', 'Risk model type'),
RISK_CONFIDENCE_LEVEL: num(0.95, 'Risk model confidence level'),
RISK_TIME_HORIZON_DAYS: num(1, 'Risk time horizon in days'),
});
// Export typed configuration object
export type RiskConfig = typeof riskConfig;
// Export individual config values for convenience
export const {
RISK_MAX_POSITION_SIZE,
RISK_MAX_PORTFOLIO_EXPOSURE,
RISK_MAX_SINGLE_ASSET_EXPOSURE,
RISK_MAX_SECTOR_EXPOSURE,
RISK_DEFAULT_STOP_LOSS,
RISK_DEFAULT_TAKE_PROFIT,
RISK_TRAILING_STOP_ENABLED,
RISK_TRAILING_STOP_DISTANCE,
RISK_MAX_DAILY_LOSS,
RISK_MAX_WEEKLY_LOSS,
RISK_MAX_MONTHLY_LOSS,
RISK_MAX_VOLATILITY_THRESHOLD,
RISK_VOLATILITY_LOOKBACK_DAYS,
RISK_MAX_CORRELATION_THRESHOLD,
RISK_CORRELATION_LOOKBACK_DAYS,
RISK_MAX_LEVERAGE,
RISK_MARGIN_CALL_THRESHOLD,
RISK_CIRCUIT_BREAKER_ENABLED,
RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD,
RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES,
RISK_MODEL_TYPE,
RISK_CONFIDENCE_LEVEL,
RISK_TIME_HORIZON_DAYS,
} = riskConfig;
/**
* Risk management configuration using Yup
*/
import { cleanEnv, envValidators } from './env-utils';
const { str, num, bool, strWithChoices } = envValidators;
/**
* Risk configuration with validation and defaults
*/
export const riskConfig = cleanEnv(process.env, {
// Position Sizing
RISK_MAX_POSITION_SIZE: num(0.1, 'Maximum position size as percentage of portfolio'),
RISK_MAX_PORTFOLIO_EXPOSURE: num(0.8, 'Maximum portfolio exposure percentage'),
RISK_MAX_SINGLE_ASSET_EXPOSURE: num(0.2, 'Maximum exposure to single asset'),
RISK_MAX_SECTOR_EXPOSURE: num(0.3, 'Maximum exposure to single sector'),
// Stop Loss and Take Profit
RISK_DEFAULT_STOP_LOSS: num(0.05, 'Default stop loss percentage'),
RISK_DEFAULT_TAKE_PROFIT: num(0.15, 'Default take profit percentage'),
RISK_TRAILING_STOP_ENABLED: bool(true, 'Enable trailing stop losses'),
RISK_TRAILING_STOP_DISTANCE: num(0.03, 'Trailing stop distance percentage'),
// Risk Limits
RISK_MAX_DAILY_LOSS: num(0.05, 'Maximum daily loss percentage'),
RISK_MAX_WEEKLY_LOSS: num(0.1, 'Maximum weekly loss percentage'),
RISK_MAX_MONTHLY_LOSS: num(0.2, 'Maximum monthly loss percentage'),
// Volatility Controls
RISK_MAX_VOLATILITY_THRESHOLD: num(0.4, 'Maximum volatility threshold'),
RISK_VOLATILITY_LOOKBACK_DAYS: num(20, 'Volatility calculation lookback period'),
// Correlation Controls
RISK_MAX_CORRELATION_THRESHOLD: num(0.7, 'Maximum correlation between positions'),
RISK_CORRELATION_LOOKBACK_DAYS: num(60, 'Correlation calculation lookback period'),
// Leverage Controls
RISK_MAX_LEVERAGE: num(2.0, 'Maximum leverage allowed'),
RISK_MARGIN_CALL_THRESHOLD: num(0.3, 'Margin call threshold'),
// Circuit Breakers
RISK_CIRCUIT_BREAKER_ENABLED: bool(true, 'Enable circuit breakers'),
RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD: num(0.1, 'Circuit breaker loss threshold'),
RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES: num(60, 'Circuit breaker cooldown period'),
// Risk Model
RISK_MODEL_TYPE: strWithChoices(['var', 'cvar', 'expected_shortfall'], 'var', 'Risk model type'),
RISK_CONFIDENCE_LEVEL: num(0.95, 'Risk model confidence level'),
RISK_TIME_HORIZON_DAYS: num(1, 'Risk time horizon in days'),
});
// Export typed configuration object
export type RiskConfig = typeof riskConfig;
// Export individual config values for convenience
export const {
RISK_MAX_POSITION_SIZE,
RISK_MAX_PORTFOLIO_EXPOSURE,
RISK_MAX_SINGLE_ASSET_EXPOSURE,
RISK_MAX_SECTOR_EXPOSURE,
RISK_DEFAULT_STOP_LOSS,
RISK_DEFAULT_TAKE_PROFIT,
RISK_TRAILING_STOP_ENABLED,
RISK_TRAILING_STOP_DISTANCE,
RISK_MAX_DAILY_LOSS,
RISK_MAX_WEEKLY_LOSS,
RISK_MAX_MONTHLY_LOSS,
RISK_MAX_VOLATILITY_THRESHOLD,
RISK_VOLATILITY_LOOKBACK_DAYS,
RISK_MAX_CORRELATION_THRESHOLD,
RISK_CORRELATION_LOOKBACK_DAYS,
RISK_MAX_LEVERAGE,
RISK_MARGIN_CALL_THRESHOLD,
RISK_CIRCUIT_BREAKER_ENABLED,
RISK_CIRCUIT_BREAKER_LOSS_THRESHOLD,
RISK_CIRCUIT_BREAKER_COOLDOWN_MINUTES,
RISK_MODEL_TYPE,
RISK_CONFIDENCE_LEVEL,
RISK_TIME_HORIZON_DAYS,
} = riskConfig;

View file

@ -1,433 +1,445 @@
/**
* Integration Tests for Config Library
*
* Tests the entire configuration system including module interactions,
* environment loading, validation across modules, and type exports.
*/
import { describe, test, expect, beforeEach } from 'bun:test';
import { setTestEnv, clearEnvVars, getMinimalTestEnv } from '../test/setup';
describe('Config Library Integration', () => {
beforeEach(() => {
// Clear module cache for clean state
// Note: Bun handles module caching differently than Jest
});
describe('Complete Configuration Loading', () => { test('should load all configuration modules successfully', async () => {
setTestEnv(getMinimalTestEnv());
// Import all modules
const [
{ Environment, getEnvironment },
{ postgresConfig },
{ questdbConfig },
{ mongodbConfig },
{ loggingConfig },
{ riskConfig }
] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
// Verify all configs are loaded
expect(Environment).toBeDefined();
expect(getEnvironment).toBeDefined();
expect(postgresConfig).toBeDefined();
expect(questdbConfig).toBeDefined();
expect(mongodbConfig).toBeDefined();
expect(loggingConfig).toBeDefined();
expect(riskConfig).toBeDefined();
// Verify core utilities
expect(getEnvironment()).toBe(Environment.Testing); // Should be Testing due to NODE_ENV=test in setup
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); expect(questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodbConfig.MONGODB_HOST).toBe('localhost'); // fix: use correct property
expect(loggingConfig.LOG_LEVEL).toBeDefined();
expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1);
}); test('should handle missing required environment variables gracefully', async () => {
setTestEnv({
NODE_ENV: 'test'
// Missing required variables
});
// Should be able to load core utilities
const { Environment, getEnvironment } = await import('../src/core');
expect(Environment).toBeDefined();
expect(getEnvironment()).toBe(Environment.Testing);
// Should fail to load modules requiring specific vars (if they have required vars)
// Note: Most modules have defaults, so they might not throw
try {
const { postgresConfig } = await import('../src/postgres');
expect(postgresConfig).toBeDefined();
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value
} catch (error) {
// If it throws, that's also acceptable behavior
expect(error).toBeDefined();
}
}); test('should maintain consistency across environment detection', async () => {
setTestEnv({
NODE_ENV: 'production',
...getMinimalTestEnv()
});
const [
{ Environment, getEnvironment },
{ postgresConfig },
{ questdbConfig },
{ mongodbConfig },
{ loggingConfig }
] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging')
]);
// Note: Due to module caching, environment is set at first import
// All modules should detect the same environment (which will be Testing due to test setup)
expect(getEnvironment()).toBe(Environment.Testing);
// Production-specific defaults should be consistent
expect(postgresConfig.POSTGRES_SSL).toBe(false); // default is false unless overridden expect(questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // checking actual property name
expect(mongodbConfig.MONGODB_TLS).toBe(false); // checking actual property name
expect(loggingConfig.LOG_FORMAT).toBe('json');
});
});
describe('Main Index Exports', () => { test('should export all configuration objects from index', async () => {
setTestEnv(getMinimalTestEnv());
const config = await import('../src/index');
// Core utilities (no coreConfig object)
expect(config.Environment).toBeDefined();
expect(config.getEnvironment).toBeDefined();
expect(config.ConfigurationError).toBeDefined();
// Configuration objects
expect(config.postgresConfig).toBeDefined();
expect(config.questdbConfig).toBeDefined();
expect(config.mongodbConfig).toBeDefined();
expect(config.loggingConfig).toBeDefined();
expect(config.riskConfig).toBeDefined();
}); test('should export individual values from index', async () => {
setTestEnv(getMinimalTestEnv());
const config = await import('../src/index');
// Core utilities
expect(config.Environment).toBeDefined();
expect(config.getEnvironment).toBeDefined();
// Individual configuration values exported from modules
expect(config.POSTGRES_HOST).toBeDefined();
expect(config.POSTGRES_PORT).toBeDefined();
expect(config.QUESTDB_HOST).toBeDefined();
expect(config.MONGODB_HOST).toBeDefined();
// Risk values
expect(config.RISK_MAX_POSITION_SIZE).toBeDefined();
expect(config.RISK_MAX_DAILY_LOSS).toBeDefined();
// Logging values
expect(config.LOG_LEVEL).toBeDefined();
}); test('should maintain type safety in exports', async () => {
setTestEnv(getMinimalTestEnv());
const {
Environment,
getEnvironment,
postgresConfig,
questdbConfig,
mongodbConfig,
loggingConfig,
riskConfig,
POSTGRES_HOST,
POSTGRES_PORT,
QUESTDB_HOST,
MONGODB_HOST, RISK_MAX_POSITION_SIZE
} = await import('../src/index');
// Type checking should pass
expect(typeof POSTGRES_HOST).toBe('string');
expect(typeof POSTGRES_PORT).toBe('number');
expect(typeof QUESTDB_HOST).toBe('string');
expect(typeof MONGODB_HOST).toBe('string');
expect(typeof RISK_MAX_POSITION_SIZE).toBe('number');
// Configuration objects should have expected shapes
expect(postgresConfig).toHaveProperty('POSTGRES_HOST');
expect(postgresConfig).toHaveProperty('POSTGRES_PORT');
expect(questdbConfig).toHaveProperty('QUESTDB_HOST');
expect(mongodbConfig).toHaveProperty('MONGODB_HOST');
expect(loggingConfig).toHaveProperty('LOG_LEVEL');
expect(riskConfig).toHaveProperty('RISK_MAX_POSITION_SIZE');
});
});
describe('Environment Variable Validation', () => {
test('should validate environment variables across all modules', async () => {
setTestEnv({
NODE_ENV: 'test',
LOG_LEVEL: 'info', // valid level
POSTGRES_HOST: 'localhost',
POSTGRES_DATABASE: 'test',
POSTGRES_USERNAME: 'test',
POSTGRES_PASSWORD: 'test',
QUESTDB_HOST: 'localhost',
MONGODB_HOST: 'localhost',
MONGODB_DATABASE: 'test',
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05'
}); // All imports should succeed with valid config
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // default test env
expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost');
expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost');
expect(logging.loggingConfig.LOG_LEVEL).toBe('info'); // set in test
expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // from test env
}); test('should accept valid environment variables across all modules', async () => {
setTestEnv({
NODE_ENV: 'development',
LOG_LEVEL: 'debug',
POSTGRES_HOST: 'localhost',
POSTGRES_PORT: '5432',
POSTGRES_DATABASE: 'stockbot_dev',
POSTGRES_USERNAME: 'dev_user',
POSTGRES_PASSWORD: 'dev_pass',
POSTGRES_SSL: 'false',
QUESTDB_HOST: 'localhost',
QUESTDB_HTTP_PORT: '9000',
QUESTDB_PG_PORT: '8812',
MONGODB_HOST: 'localhost',
MONGODB_DATABASE: 'stockbot_dev',
RISK_MAX_POSITION_SIZE: '0.25',
RISK_MAX_DAILY_LOSS: '0.025',
LOG_FORMAT: 'json',
LOG_FILE_ENABLED: 'false'
});
// All imports should succeed
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
// Since this is the first test to set NODE_ENV to development and modules might not be cached yet,
// this could actually change the environment. Let's test what we actually get.
expect(core.getEnvironment()).toBeDefined(); // Just verify it returns something valid
expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost');
expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost');
expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default value
expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value
});
});
describe('Configuration Consistency', () => { test('should maintain consistent SSL settings across databases', async () => {
setTestEnv({
NODE_ENV: 'production',
POSTGRES_HOST: 'prod-postgres.com',
POSTGRES_DATABASE: 'prod_db',
POSTGRES_USERNAME: 'prod_user',
POSTGRES_PASSWORD: 'prod_pass',
QUESTDB_HOST: 'prod-questdb.com',
MONGODB_HOST: 'prod-mongo.com',
MONGODB_DATABASE: 'prod_db',
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05'
// SSL settings not explicitly set - should use defaults
});
const [postgres, questdb, mongodb] = await Promise.all([
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb')
]);
// Check actual SSL property names and their default values expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default is false
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // default is false
expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false); // default is false
}); test('should maintain consistent environment detection across modules', async () => {
setTestEnv({
NODE_ENV: 'staging',
...getMinimalTestEnv()
});
const [core, logging] = await Promise.all([
import('../src/core'),
import('../src/logging')
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
// The setTestEnv call above doesn't actually change the real NODE_ENV because modules cache it
// So we check that the test setup is working correctly
expect(process.env.NODE_ENV).toBe('test'); // This is what's actually set in test environment
});
});
describe('Performance and Caching', () => { test('should cache configuration values between imports', async () => {
setTestEnv(getMinimalTestEnv());
// Import the same module multiple times
const postgres1 = await import('../src/postgres');
const postgres2 = await import('../src/postgres');
const postgres3 = await import('../src/postgres');
// Should return the same object reference (cached)
expect(postgres1.postgresConfig).toBe(postgres2.postgresConfig);
expect(postgres2.postgresConfig).toBe(postgres3.postgresConfig);
});
test('should handle rapid sequential imports', async () => {
setTestEnv(getMinimalTestEnv());
// Import all modules simultaneously
const startTime = Date.now();
await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
const endTime = Date.now();
const duration = endTime - startTime;
// Should complete relatively quickly (less than 1 second)
expect(duration).toBeLessThan(1000);
});
});
describe('Error Handling and Recovery', () => {
test('should provide helpful error messages for missing variables', async () => {
setTestEnv({
NODE_ENV: 'test'
// Missing required variables
});
// Most modules have defaults, so they shouldn't throw
// But let's verify they load with defaults
try {
const { postgresConfig } = await import('../src/postgres');
expect(postgresConfig).toBeDefined();
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value
} catch (error) {
// If it throws, check that error message is helpful
expect((error as Error).message).toBeTruthy();
}
try {
const { riskConfig } = await import('../src/risk');
expect(riskConfig).toBeDefined();
expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value
} catch (error) {
// If it throws, check that error message is helpful
expect((error as Error).message).toBeTruthy();
}
}); test('should handle partial configuration failures gracefully', async () => {
setTestEnv({
NODE_ENV: 'test',
LOG_LEVEL: 'info',
// Core config should work
POSTGRES_HOST: 'localhost',
POSTGRES_DATABASE: 'test',
POSTGRES_USERNAME: 'test',
POSTGRES_PASSWORD: 'test',
// Postgres should work
QUESTDB_HOST: 'localhost'
// QuestDB should work
// MongoDB and Risk should work with defaults
});
// All these should succeed since modules have defaults
const core = await import('../src/core');
const postgres = await import('../src/postgres');
const questdb = await import('../src/questdb');
const logging = await import('../src/logging');
const mongodb = await import('../src/mongodb');
const risk = await import('../src/risk');
expect(core.Environment).toBeDefined();
expect(postgres.postgresConfig).toBeDefined();
expect(questdb.questdbConfig).toBeDefined();
expect(logging.loggingConfig).toBeDefined();
expect(mongodb.mongodbConfig).toBeDefined();
expect(risk.riskConfig).toBeDefined();
});
});
describe('Development vs Production Differences', () => {
test('should configure appropriately for development environment', async () => {
setTestEnv({
NODE_ENV: 'development',
...getMinimalTestEnv(),
POSTGRES_SSL: undefined, // Should default to false
QUESTDB_TLS_ENABLED: undefined, // Should default to false
MONGODB_TLS: undefined, // Should default to false
LOG_FORMAT: undefined, // Should default to json
RISK_CIRCUIT_BREAKER_ENABLED: undefined // Should default to true
});
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk')
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false);
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false);
expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default
expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true); // default
});
test('should configure appropriately for production environment', async () => {
setTestEnv({
NODE_ENV: 'production',
...getMinimalTestEnv(),
POSTGRES_SSL: undefined, // Should default to false (same as dev)
QUESTDB_TLS_ENABLED: undefined, // Should default to false
MONGODB_TLS: undefined, // Should default to false
LOG_FORMAT: undefined, // Should default to json
RISK_CIRCUIT_BREAKER_ENABLED: undefined // Should default to true
});
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk') ]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default doesn't change by env
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false);
expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false);
expect(logging.loggingConfig.LOG_FORMAT).toBe('json');
expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true);
});
});
});
/**
* Integration Tests for Config Library
*
* Tests the entire configuration system including module interactions,
* environment loading, validation across modules, and type exports.
*/
import { beforeEach, describe, expect, test } from 'bun:test';
import { clearEnvVars, getMinimalTestEnv, setTestEnv } from '../test/setup';
describe('Config Library Integration', () => {
beforeEach(() => {
// Clear module cache for clean state
// Note: Bun handles module caching differently than Jest
});
describe('Complete Configuration Loading', () => {
test('should load all configuration modules successfully', async () => {
setTestEnv(getMinimalTestEnv());
// Import all modules
const [
{ Environment, getEnvironment },
{ postgresConfig },
{ questdbConfig },
{ mongodbConfig },
{ loggingConfig },
{ riskConfig },
] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk'),
]);
// Verify all configs are loaded
expect(Environment).toBeDefined();
expect(getEnvironment).toBeDefined();
expect(postgresConfig).toBeDefined();
expect(questdbConfig).toBeDefined();
expect(mongodbConfig).toBeDefined();
expect(loggingConfig).toBeDefined();
expect(riskConfig).toBeDefined();
// Verify core utilities
expect(getEnvironment()).toBe(Environment.Testing); // Should be Testing due to NODE_ENV=test in setup
expect(postgresConfig.POSTGRES_HOST).toBe('localhost');
expect(questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodbConfig.MONGODB_HOST).toBe('localhost'); // fix: use correct property
expect(loggingConfig.LOG_LEVEL).toBeDefined();
expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1);
});
test('should handle missing required environment variables gracefully', async () => {
setTestEnv({
NODE_ENV: 'test',
// Missing required variables
});
// Should be able to load core utilities
const { Environment, getEnvironment } = await import('../src/core');
expect(Environment).toBeDefined();
expect(getEnvironment()).toBe(Environment.Testing);
// Should fail to load modules requiring specific vars (if they have required vars)
// Note: Most modules have defaults, so they might not throw
try {
const { postgresConfig } = await import('../src/postgres');
expect(postgresConfig).toBeDefined();
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value
} catch (error) {
// If it throws, that's also acceptable behavior
expect(error).toBeDefined();
}
});
test('should maintain consistency across environment detection', async () => {
setTestEnv({
NODE_ENV: 'production',
...getMinimalTestEnv(),
});
const [
{ Environment, getEnvironment },
{ postgresConfig },
{ questdbConfig },
{ mongodbConfig },
{ loggingConfig },
] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
]);
// Note: Due to module caching, environment is set at first import
// All modules should detect the same environment (which will be Testing due to test setup)
expect(getEnvironment()).toBe(Environment.Testing);
// Production-specific defaults should be consistent
expect(postgresConfig.POSTGRES_SSL).toBe(false); // default is false unless overridden expect(questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // checking actual property name
expect(mongodbConfig.MONGODB_TLS).toBe(false); // checking actual property name
expect(loggingConfig.LOG_FORMAT).toBe('json');
});
});
describe('Main Index Exports', () => {
test('should export all configuration objects from index', async () => {
setTestEnv(getMinimalTestEnv());
const config = await import('../src/index');
// Core utilities (no coreConfig object)
expect(config.Environment).toBeDefined();
expect(config.getEnvironment).toBeDefined();
expect(config.ConfigurationError).toBeDefined();
// Configuration objects
expect(config.postgresConfig).toBeDefined();
expect(config.questdbConfig).toBeDefined();
expect(config.mongodbConfig).toBeDefined();
expect(config.loggingConfig).toBeDefined();
expect(config.riskConfig).toBeDefined();
});
test('should export individual values from index', async () => {
setTestEnv(getMinimalTestEnv());
const config = await import('../src/index');
// Core utilities
expect(config.Environment).toBeDefined();
expect(config.getEnvironment).toBeDefined();
// Individual configuration values exported from modules
expect(config.POSTGRES_HOST).toBeDefined();
expect(config.POSTGRES_PORT).toBeDefined();
expect(config.QUESTDB_HOST).toBeDefined();
expect(config.MONGODB_HOST).toBeDefined();
// Risk values
expect(config.RISK_MAX_POSITION_SIZE).toBeDefined();
expect(config.RISK_MAX_DAILY_LOSS).toBeDefined();
// Logging values
expect(config.LOG_LEVEL).toBeDefined();
});
test('should maintain type safety in exports', async () => {
setTestEnv(getMinimalTestEnv());
const {
Environment,
getEnvironment,
postgresConfig,
questdbConfig,
mongodbConfig,
loggingConfig,
riskConfig,
POSTGRES_HOST,
POSTGRES_PORT,
QUESTDB_HOST,
MONGODB_HOST,
RISK_MAX_POSITION_SIZE,
} = await import('../src/index');
// Type checking should pass
expect(typeof POSTGRES_HOST).toBe('string');
expect(typeof POSTGRES_PORT).toBe('number');
expect(typeof QUESTDB_HOST).toBe('string');
expect(typeof MONGODB_HOST).toBe('string');
expect(typeof RISK_MAX_POSITION_SIZE).toBe('number');
// Configuration objects should have expected shapes
expect(postgresConfig).toHaveProperty('POSTGRES_HOST');
expect(postgresConfig).toHaveProperty('POSTGRES_PORT');
expect(questdbConfig).toHaveProperty('QUESTDB_HOST');
expect(mongodbConfig).toHaveProperty('MONGODB_HOST');
expect(loggingConfig).toHaveProperty('LOG_LEVEL');
expect(riskConfig).toHaveProperty('RISK_MAX_POSITION_SIZE');
});
});
describe('Environment Variable Validation', () => {
test('should validate environment variables across all modules', async () => {
setTestEnv({
NODE_ENV: 'test',
LOG_LEVEL: 'info', // valid level
POSTGRES_HOST: 'localhost',
POSTGRES_DATABASE: 'test',
POSTGRES_USERNAME: 'test',
POSTGRES_PASSWORD: 'test',
QUESTDB_HOST: 'localhost',
MONGODB_HOST: 'localhost',
MONGODB_DATABASE: 'test',
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05',
}); // All imports should succeed with valid config
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk'),
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // default test env
expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost');
expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost');
expect(logging.loggingConfig.LOG_LEVEL).toBe('info'); // set in test
expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // from test env
});
test('should accept valid environment variables across all modules', async () => {
setTestEnv({
NODE_ENV: 'development',
LOG_LEVEL: 'debug',
POSTGRES_HOST: 'localhost',
POSTGRES_PORT: '5432',
POSTGRES_DATABASE: 'stockbot_dev',
POSTGRES_USERNAME: 'dev_user',
POSTGRES_PASSWORD: 'dev_pass',
POSTGRES_SSL: 'false',
QUESTDB_HOST: 'localhost',
QUESTDB_HTTP_PORT: '9000',
QUESTDB_PG_PORT: '8812',
MONGODB_HOST: 'localhost',
MONGODB_DATABASE: 'stockbot_dev',
RISK_MAX_POSITION_SIZE: '0.25',
RISK_MAX_DAILY_LOSS: '0.025',
LOG_FORMAT: 'json',
LOG_FILE_ENABLED: 'false',
});
// All imports should succeed
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk'),
]);
// Since this is the first test to set NODE_ENV to development and modules might not be cached yet,
// this could actually change the environment. Let's test what we actually get.
expect(core.getEnvironment()).toBeDefined(); // Just verify it returns something valid
expect(postgres.postgresConfig.POSTGRES_HOST).toBe('localhost');
expect(questdb.questdbConfig.QUESTDB_HOST).toBe('localhost');
expect(mongodb.mongodbConfig.MONGODB_HOST).toBe('localhost');
expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default value
expect(risk.riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value
});
});
describe('Configuration Consistency', () => {
test('should maintain consistent SSL settings across databases', async () => {
setTestEnv({
NODE_ENV: 'production',
POSTGRES_HOST: 'prod-postgres.com',
POSTGRES_DATABASE: 'prod_db',
POSTGRES_USERNAME: 'prod_user',
POSTGRES_PASSWORD: 'prod_pass',
QUESTDB_HOST: 'prod-questdb.com',
MONGODB_HOST: 'prod-mongo.com',
MONGODB_DATABASE: 'prod_db',
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05',
// SSL settings not explicitly set - should use defaults
});
const [postgres, questdb, mongodb] = await Promise.all([
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
]);
// Check actual SSL property names and their default values expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default is false
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false); // default is false
expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false); // default is false
});
test('should maintain consistent environment detection across modules', async () => {
setTestEnv({
NODE_ENV: 'staging',
...getMinimalTestEnv(),
});
const [core, logging] = await Promise.all([import('../src/core'), import('../src/logging')]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
// The setTestEnv call above doesn't actually change the real NODE_ENV because modules cache it
// So we check that the test setup is working correctly
expect(process.env.NODE_ENV).toBe('test'); // This is what's actually set in test environment
});
});
describe('Performance and Caching', () => {
test('should cache configuration values between imports', async () => {
setTestEnv(getMinimalTestEnv());
// Import the same module multiple times
const postgres1 = await import('../src/postgres');
const postgres2 = await import('../src/postgres');
const postgres3 = await import('../src/postgres');
// Should return the same object reference (cached)
expect(postgres1.postgresConfig).toBe(postgres2.postgresConfig);
expect(postgres2.postgresConfig).toBe(postgres3.postgresConfig);
});
test('should handle rapid sequential imports', async () => {
setTestEnv(getMinimalTestEnv());
// Import all modules simultaneously
const startTime = Date.now();
await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk'),
]);
const endTime = Date.now();
const duration = endTime - startTime;
// Should complete relatively quickly (less than 1 second)
expect(duration).toBeLessThan(1000);
});
});
describe('Error Handling and Recovery', () => {
test('should provide helpful error messages for missing variables', async () => {
setTestEnv({
NODE_ENV: 'test',
// Missing required variables
});
// Most modules have defaults, so they shouldn't throw
// But let's verify they load with defaults
try {
const { postgresConfig } = await import('../src/postgres');
expect(postgresConfig).toBeDefined();
expect(postgresConfig.POSTGRES_HOST).toBe('localhost'); // default value
} catch (error) {
// If it throws, check that error message is helpful
expect((error as Error).message).toBeTruthy();
}
try {
const { riskConfig } = await import('../src/risk');
expect(riskConfig).toBeDefined();
expect(riskConfig.RISK_MAX_POSITION_SIZE).toBe(0.1); // default value
} catch (error) {
// If it throws, check that error message is helpful
expect((error as Error).message).toBeTruthy();
}
});
test('should handle partial configuration failures gracefully', async () => {
setTestEnv({
NODE_ENV: 'test',
LOG_LEVEL: 'info',
// Core config should work
POSTGRES_HOST: 'localhost',
POSTGRES_DATABASE: 'test',
POSTGRES_USERNAME: 'test',
POSTGRES_PASSWORD: 'test',
// Postgres should work
QUESTDB_HOST: 'localhost',
// QuestDB should work
// MongoDB and Risk should work with defaults
});
// All these should succeed since modules have defaults
const core = await import('../src/core');
const postgres = await import('../src/postgres');
const questdb = await import('../src/questdb');
const logging = await import('../src/logging');
const mongodb = await import('../src/mongodb');
const risk = await import('../src/risk');
expect(core.Environment).toBeDefined();
expect(postgres.postgresConfig).toBeDefined();
expect(questdb.questdbConfig).toBeDefined();
expect(logging.loggingConfig).toBeDefined();
expect(mongodb.mongodbConfig).toBeDefined();
expect(risk.riskConfig).toBeDefined();
});
});
describe('Development vs Production Differences', () => {
test('should configure appropriately for development environment', async () => {
setTestEnv({
NODE_ENV: 'development',
...getMinimalTestEnv(),
POSTGRES_SSL: undefined, // Should default to false
QUESTDB_TLS_ENABLED: undefined, // Should default to false
MONGODB_TLS: undefined, // Should default to false
LOG_FORMAT: undefined, // Should default to json
RISK_CIRCUIT_BREAKER_ENABLED: undefined, // Should default to true
});
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk'),
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false);
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false);
expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false);
expect(logging.loggingConfig.LOG_FORMAT).toBe('json'); // default
expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true); // default
});
test('should configure appropriately for production environment', async () => {
setTestEnv({
NODE_ENV: 'production',
...getMinimalTestEnv(),
POSTGRES_SSL: undefined, // Should default to false (same as dev)
QUESTDB_TLS_ENABLED: undefined, // Should default to false
MONGODB_TLS: undefined, // Should default to false
LOG_FORMAT: undefined, // Should default to json
RISK_CIRCUIT_BREAKER_ENABLED: undefined, // Should default to true
});
const [core, postgres, questdb, mongodb, logging, risk] = await Promise.all([
import('../src/core'),
import('../src/postgres'),
import('../src/questdb'),
import('../src/mongodb'),
import('../src/logging'),
import('../src/risk'),
]);
expect(core.getEnvironment()).toBe(core.Environment.Testing); // Module caching means test env persists
expect(postgres.postgresConfig.POSTGRES_SSL).toBe(false); // default doesn't change by env
expect(questdb.questdbConfig.QUESTDB_TLS_ENABLED).toBe(false);
expect(mongodb.mongodbConfig.MONGODB_TLS).toBe(false);
expect(logging.loggingConfig.LOG_FORMAT).toBe('json');
expect(risk.riskConfig.RISK_CIRCUIT_BREAKER_ENABLED).toBe(true);
});
});
});

View file

@ -1,92 +1,93 @@
/**
* Test Setup for @stock-bot/config Library
*
* Provides common setup and utilities for testing configuration modules.
*/
// Set NODE_ENV immediately at module load time
process.env.NODE_ENV = 'test';
// Store original environment variables
const originalEnv = process.env;
// Note: Bun provides its own test globals, no need to import from @jest/globals
beforeEach(() => {
// Reset environment variables to original state
process.env = { ...originalEnv };
// Ensure NODE_ENV is set to test by default
process.env.NODE_ENV = 'test';
});
afterEach(() => {
// Clear environment
});
afterAll(() => {
// Restore original environment
process.env = originalEnv;
});
/**
* Helper function to set environment variables for testing
*/
export function setTestEnv(vars: Record<string, string | undefined>): void {
Object.assign(process.env, vars);
}
/**
* Helper function to clear specific environment variables
*/
export function clearEnvVars(vars: string[]): void {
vars.forEach(varName => {
delete process.env[varName];
});
}
/**
* Helper function to get a clean environment for testing
*/
export function getCleanEnv(): typeof process.env {
return {
NODE_ENV: 'test'
};
}
/**
* Helper function to create minimal required environment variables
*/
export function getMinimalTestEnv(): Record<string, string> { return {
NODE_ENV: 'test',
// Logging
LOG_LEVEL: 'info', // Changed from 'error' to 'info' to match test expectations
// Database
POSTGRES_HOST: 'localhost',
POSTGRES_PORT: '5432',
POSTGRES_DATABASE: 'test_db',
POSTGRES_USERNAME: 'test_user',
POSTGRES_PASSWORD: 'test_pass',
// QuestDB
QUESTDB_HOST: 'localhost',
QUESTDB_HTTP_PORT: '9000',
QUESTDB_PG_PORT: '8812',
// MongoDB
MONGODB_HOST: 'localhost',
MONGODB_PORT: '27017',
MONGODB_DATABASE: 'test_db',
MONGODB_USERNAME: 'test_user',
MONGODB_PASSWORD: 'test_pass',
// Dragonfly
DRAGONFLY_HOST: 'localhost',
DRAGONFLY_PORT: '6379',
// Monitoring
PROMETHEUS_PORT: '9090',
GRAFANA_PORT: '3000',
// Data Providers
DATA_PROVIDER_API_KEY: 'test_key',
// Risk
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05',
// Admin
ADMIN_PORT: '8080'
};
}
/**
* Test Setup for @stock-bot/config Library
*
* Provides common setup and utilities for testing configuration modules.
*/
// Set NODE_ENV immediately at module load time
process.env.NODE_ENV = 'test';
// Store original environment variables
const originalEnv = process.env;
// Note: Bun provides its own test globals, no need to import from @jest/globals
beforeEach(() => {
// Reset environment variables to original state
process.env = { ...originalEnv };
// Ensure NODE_ENV is set to test by default
process.env.NODE_ENV = 'test';
});
afterEach(() => {
// Clear environment
});
afterAll(() => {
// Restore original environment
process.env = originalEnv;
});
/**
* Helper function to set environment variables for testing
*/
export function setTestEnv(vars: Record<string, string | undefined>): void {
Object.assign(process.env, vars);
}
/**
* Helper function to clear specific environment variables
*/
export function clearEnvVars(vars: string[]): void {
vars.forEach(varName => {
delete process.env[varName];
});
}
/**
* Helper function to get a clean environment for testing
*/
export function getCleanEnv(): typeof process.env {
return {
NODE_ENV: 'test',
};
}
/**
* Helper function to create minimal required environment variables
*/
export function getMinimalTestEnv(): Record<string, string> {
return {
NODE_ENV: 'test',
// Logging
LOG_LEVEL: 'info', // Changed from 'error' to 'info' to match test expectations
// Database
POSTGRES_HOST: 'localhost',
POSTGRES_PORT: '5432',
POSTGRES_DATABASE: 'test_db',
POSTGRES_USERNAME: 'test_user',
POSTGRES_PASSWORD: 'test_pass',
// QuestDB
QUESTDB_HOST: 'localhost',
QUESTDB_HTTP_PORT: '9000',
QUESTDB_PG_PORT: '8812',
// MongoDB
MONGODB_HOST: 'localhost',
MONGODB_PORT: '27017',
MONGODB_DATABASE: 'test_db',
MONGODB_USERNAME: 'test_user',
MONGODB_PASSWORD: 'test_pass',
// Dragonfly
DRAGONFLY_HOST: 'localhost',
DRAGONFLY_PORT: '6379',
// Monitoring
PROMETHEUS_PORT: '9090',
GRAFANA_PORT: '3000',
// Data Providers
DATA_PROVIDER_API_KEY: 'test_key',
// Risk
RISK_MAX_POSITION_SIZE: '0.1',
RISK_MAX_DAILY_LOSS: '0.05',
// Admin
ADMIN_PORT: '8080',
};
}

View file

@ -1,485 +1,495 @@
import { getLogger } from '@stock-bot/logger';
export interface DataFrameRow {
[key: string]: any;
}
export interface DataFrameOptions {
index?: string;
columns?: string[];
dtypes?: Record<string, 'number' | 'string' | 'boolean' | 'date'>;
}
export interface GroupByResult {
[key: string]: DataFrame;
}
export interface AggregationFunction {
(values: any[]): any;
}
export class DataFrame {
private data: DataFrameRow[];
private _columns: string[];
private _index: string;
private _dtypes: Record<string, 'number' | 'string' | 'boolean' | 'date'>;
private logger = getLogger('dataframe');
constructor(data: DataFrameRow[] = [], options: DataFrameOptions = {}) {
this.data = [...data];
this._index = options.index || 'index';
this._columns = options.columns || this.inferColumns();
this._dtypes = options.dtypes || {};
this.validateAndCleanData();
}
private inferColumns(): string[] {
if (this.data.length === 0) return [];
const columns = new Set<string>();
for (const row of this.data) {
Object.keys(row).forEach(key => columns.add(key));
}
return Array.from(columns).sort();
}
private validateAndCleanData(): void {
if (this.data.length === 0) return;
// Ensure all rows have the same columns
for (let i = 0; i < this.data.length; i++) {
const row = this.data[i];
// Add missing columns with null values
for (const col of this._columns) {
if (!(col in row)) {
row[col] = null;
}
}
// Apply data type conversions
for (const [col, dtype] of Object.entries(this._dtypes)) {
if (col in row && row[col] !== null) {
row[col] = this.convertValue(row[col], dtype);
}
}
}
}
private convertValue(value: any, dtype: string): any {
switch (dtype) {
case 'number':
return typeof value === 'number' ? value : parseFloat(value);
case 'string':
return String(value);
case 'boolean':
return Boolean(value);
case 'date':
return value instanceof Date ? value : new Date(value);
default:
return value;
}
}
// Basic properties
get columns(): string[] {
return [...this._columns];
}
get index(): string {
return this._index;
}
get length(): number {
return this.data.length;
}
get shape(): [number, number] {
return [this.data.length, this._columns.length];
}
get empty(): boolean {
return this.data.length === 0;
}
// Data access methods
head(n: number = 5): DataFrame {
return new DataFrame(this.data.slice(0, n), {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
tail(n: number = 5): DataFrame {
return new DataFrame(this.data.slice(-n), {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
iloc(start: number, end?: number): DataFrame {
const slice = end !== undefined ? this.data.slice(start, end) : this.data.slice(start);
return new DataFrame(slice, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
at(index: number, column: string): any {
if (index < 0 || index >= this.data.length) {
throw new Error(`Index ${index} out of bounds`);
}
return this.data[index][column];
}
// Column operations
select(columns: string[]): DataFrame {
const validColumns = columns.filter(col => this._columns.includes(col));
const newData = this.data.map(row => {
const newRow: DataFrameRow = {};
for (const col of validColumns) {
newRow[col] = row[col];
}
return newRow;
});
return new DataFrame(newData, {
columns: validColumns,
index: this._index,
dtypes: this.filterDtypes(validColumns)
});
}
drop(columns: string[]): DataFrame {
const remainingColumns = this._columns.filter(col => !columns.includes(col));
return this.select(remainingColumns);
}
getColumn(column: string): any[] {
if (!this._columns.includes(column)) {
throw new Error(`Column '${column}' not found`);
}
return this.data.map(row => row[column]);
}
setColumn(column: string, values: any[]): DataFrame {
if (values.length !== this.data.length) {
throw new Error('Values length must match DataFrame length');
}
const newData = this.data.map((row, index) => ({
...row,
[column]: values[index]
}));
const newColumns = this._columns.includes(column)
? this._columns
: [...this._columns, column];
return new DataFrame(newData, {
columns: newColumns,
index: this._index,
dtypes: this._dtypes
});
}
// Filtering
filter(predicate: (row: DataFrameRow, index: number) => boolean): DataFrame {
const filteredData = this.data.filter(predicate);
return new DataFrame(filteredData, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
where(column: string, operator: '>' | '<' | '>=' | '<=' | '==' | '!=', value: any): DataFrame {
return this.filter(row => {
const cellValue = row[column];
switch (operator) {
case '>': return cellValue > value;
case '<': return cellValue < value;
case '>=': return cellValue >= value;
case '<=': return cellValue <= value;
case '==': return cellValue === value;
case '!=': return cellValue !== value;
default: return false;
}
});
}
// Sorting
sort(column: string, ascending: boolean = true): DataFrame {
const sortedData = [...this.data].sort((a, b) => {
const aVal = a[column];
const bVal = b[column];
if (aVal === bVal) return 0;
const comparison = aVal > bVal ? 1 : -1;
return ascending ? comparison : -comparison;
});
return new DataFrame(sortedData, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
// Aggregation
groupBy(column: string): GroupByResult {
const groups: Record<string, DataFrameRow[]> = {};
for (const row of this.data) {
const key = String(row[column]);
if (!groups[key]) {
groups[key] = [];
}
groups[key].push(row);
}
const result: GroupByResult = {};
for (const [key, rows] of Object.entries(groups)) {
result[key] = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
}
return result;
}
agg(aggregations: Record<string, AggregationFunction>): DataFrameRow {
const result: DataFrameRow = {};
for (const [column, func] of Object.entries(aggregations)) {
if (!this._columns.includes(column)) {
throw new Error(`Column '${column}' not found`);
}
const values = this.getColumn(column).filter(val => val !== null && val !== undefined);
result[column] = func(values);
}
return result;
}
// Statistical methods
mean(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return values.reduce((sum, val) => sum + val, 0) / values.length;
}
sum(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return values.reduce((sum, val) => sum + val, 0);
}
min(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return Math.min(...values);
}
max(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return Math.max(...values);
}
std(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
const variance = values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / values.length;
return Math.sqrt(variance);
}
// Time series specific methods
resample(timeColumn: string, frequency: string): DataFrame {
// Simple resampling implementation
// For production, you'd want more sophisticated time-based grouping
const sorted = this.sort(timeColumn);
switch (frequency) {
case '1H':
return this.resampleByHour(sorted, timeColumn);
case '1D':
return this.resampleByDay(sorted, timeColumn);
default:
throw new Error(`Unsupported frequency: ${frequency}`);
}
}
private resampleByHour(sorted: DataFrame, timeColumn: string): DataFrame {
const groups: Record<string, DataFrameRow[]> = {};
for (const row of sorted.data) {
const date = new Date(row[timeColumn]);
const hourKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}-${date.getHours()}`;
if (!groups[hourKey]) {
groups[hourKey] = [];
}
groups[hourKey].push(row);
}
const aggregatedData: DataFrameRow[] = [];
for (const [key, rows] of Object.entries(groups)) {
const tempDf = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
// Create OHLCV aggregation
const aggregated: DataFrameRow = {
[timeColumn]: rows[0][timeColumn],
open: rows[0].close || rows[0].price,
high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'),
low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'),
close: rows[rows.length - 1].close || rows[rows.length - 1].price,
volume: tempDf.sum('volume') || 0
};
aggregatedData.push(aggregated);
}
return new DataFrame(aggregatedData);
}
private resampleByDay(sorted: DataFrame, timeColumn: string): DataFrame {
// Similar to resampleByHour but group by day
const groups: Record<string, DataFrameRow[]> = {};
for (const row of sorted.data) {
const date = new Date(row[timeColumn]);
const dayKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}`;
if (!groups[dayKey]) {
groups[dayKey] = [];
}
groups[dayKey].push(row);
}
const aggregatedData: DataFrameRow[] = [];
for (const [key, rows] of Object.entries(groups)) {
const tempDf = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes
});
const aggregated: DataFrameRow = {
[timeColumn]: rows[0][timeColumn],
open: rows[0].close || rows[0].price,
high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'),
low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'),
close: rows[rows.length - 1].close || rows[rows.length - 1].price,
volume: tempDf.sum('volume') || 0
};
aggregatedData.push(aggregated);
}
return new DataFrame(aggregatedData);
}
// Utility methods
copy(): DataFrame {
return new DataFrame(this.data.map(row => ({ ...row })), {
columns: this._columns,
index: this._index,
dtypes: { ...this._dtypes }
});
}
concat(other: DataFrame): DataFrame {
const combinedData = [...this.data, ...other.data];
const combinedColumns = Array.from(new Set([...this._columns, ...other._columns]));
return new DataFrame(combinedData, {
columns: combinedColumns,
index: this._index,
dtypes: { ...this._dtypes, ...other._dtypes }
});
}
toArray(): DataFrameRow[] {
return this.data.map(row => ({ ...row }));
}
toJSON(): string {
return JSON.stringify(this.data);
}
private filterDtypes(columns: string[]): Record<string, 'number' | 'string' | 'boolean' | 'date'> {
const filtered: Record<string, 'number' | 'string' | 'boolean' | 'date'> = {};
for (const col of columns) {
if (this._dtypes[col]) {
filtered[col] = this._dtypes[col];
}
}
return filtered;
}
// Display method
toString(): string {
if (this.empty) {
return 'Empty DataFrame';
}
const maxRows = 10;
const displayData = this.data.slice(0, maxRows);
let result = `DataFrame (${this.length} rows x ${this._columns.length} columns)\n`;
result += this._columns.join('\t') + '\n';
result += '-'.repeat(this._columns.join('\t').length) + '\n';
for (const row of displayData) {
const values = this._columns.map(col => String(row[col] ?? 'null'));
result += values.join('\t') + '\n';
}
if (this.length > maxRows) {
result += `... (${this.length - maxRows} more rows)\n`;
}
return result;
}
}
// Factory functions
export function createDataFrame(data: DataFrameRow[], options?: DataFrameOptions): DataFrame {
return new DataFrame(data, options);
}
export function readCSV(csvData: string, options?: DataFrameOptions): DataFrame {
const lines = csvData.trim().split('\n');
if (lines.length === 0) {
return new DataFrame();
}
const headers = lines[0].split(',').map(h => h.trim());
const data: DataFrameRow[] = [];
for (let i = 1; i < lines.length; i++) {
const values = lines[i].split(',').map(v => v.trim());
const row: DataFrameRow = {};
for (let j = 0; j < headers.length; j++) {
row[headers[j]] = values[j] || null;
}
data.push(row);
}
return new DataFrame(data, {
columns: headers,
...options
});
}
import { getLogger } from '@stock-bot/logger';
export interface DataFrameRow {
[key: string]: any;
}
export interface DataFrameOptions {
index?: string;
columns?: string[];
dtypes?: Record<string, 'number' | 'string' | 'boolean' | 'date'>;
}
export interface GroupByResult {
[key: string]: DataFrame;
}
export interface AggregationFunction {
(values: any[]): any;
}
export class DataFrame {
private data: DataFrameRow[];
private _columns: string[];
private _index: string;
private _dtypes: Record<string, 'number' | 'string' | 'boolean' | 'date'>;
private logger = getLogger('dataframe');
constructor(data: DataFrameRow[] = [], options: DataFrameOptions = {}) {
this.data = [...data];
this._index = options.index || 'index';
this._columns = options.columns || this.inferColumns();
this._dtypes = options.dtypes || {};
this.validateAndCleanData();
}
private inferColumns(): string[] {
if (this.data.length === 0) return [];
const columns = new Set<string>();
for (const row of this.data) {
Object.keys(row).forEach(key => columns.add(key));
}
return Array.from(columns).sort();
}
private validateAndCleanData(): void {
if (this.data.length === 0) return;
// Ensure all rows have the same columns
for (let i = 0; i < this.data.length; i++) {
const row = this.data[i];
// Add missing columns with null values
for (const col of this._columns) {
if (!(col in row)) {
row[col] = null;
}
}
// Apply data type conversions
for (const [col, dtype] of Object.entries(this._dtypes)) {
if (col in row && row[col] !== null) {
row[col] = this.convertValue(row[col], dtype);
}
}
}
}
private convertValue(value: any, dtype: string): any {
switch (dtype) {
case 'number':
return typeof value === 'number' ? value : parseFloat(value);
case 'string':
return String(value);
case 'boolean':
return Boolean(value);
case 'date':
return value instanceof Date ? value : new Date(value);
default:
return value;
}
}
// Basic properties
get columns(): string[] {
return [...this._columns];
}
get index(): string {
return this._index;
}
get length(): number {
return this.data.length;
}
get shape(): [number, number] {
return [this.data.length, this._columns.length];
}
get empty(): boolean {
return this.data.length === 0;
}
// Data access methods
head(n: number = 5): DataFrame {
return new DataFrame(this.data.slice(0, n), {
columns: this._columns,
index: this._index,
dtypes: this._dtypes,
});
}
tail(n: number = 5): DataFrame {
return new DataFrame(this.data.slice(-n), {
columns: this._columns,
index: this._index,
dtypes: this._dtypes,
});
}
iloc(start: number, end?: number): DataFrame {
const slice = end !== undefined ? this.data.slice(start, end) : this.data.slice(start);
return new DataFrame(slice, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes,
});
}
at(index: number, column: string): any {
if (index < 0 || index >= this.data.length) {
throw new Error(`Index ${index} out of bounds`);
}
return this.data[index][column];
}
// Column operations
select(columns: string[]): DataFrame {
const validColumns = columns.filter(col => this._columns.includes(col));
const newData = this.data.map(row => {
const newRow: DataFrameRow = {};
for (const col of validColumns) {
newRow[col] = row[col];
}
return newRow;
});
return new DataFrame(newData, {
columns: validColumns,
index: this._index,
dtypes: this.filterDtypes(validColumns),
});
}
drop(columns: string[]): DataFrame {
const remainingColumns = this._columns.filter(col => !columns.includes(col));
return this.select(remainingColumns);
}
getColumn(column: string): any[] {
if (!this._columns.includes(column)) {
throw new Error(`Column '${column}' not found`);
}
return this.data.map(row => row[column]);
}
setColumn(column: string, values: any[]): DataFrame {
if (values.length !== this.data.length) {
throw new Error('Values length must match DataFrame length');
}
const newData = this.data.map((row, index) => ({
...row,
[column]: values[index],
}));
const newColumns = this._columns.includes(column) ? this._columns : [...this._columns, column];
return new DataFrame(newData, {
columns: newColumns,
index: this._index,
dtypes: this._dtypes,
});
}
// Filtering
filter(predicate: (row: DataFrameRow, index: number) => boolean): DataFrame {
const filteredData = this.data.filter(predicate);
return new DataFrame(filteredData, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes,
});
}
where(column: string, operator: '>' | '<' | '>=' | '<=' | '==' | '!=', value: any): DataFrame {
return this.filter(row => {
const cellValue = row[column];
switch (operator) {
case '>':
return cellValue > value;
case '<':
return cellValue < value;
case '>=':
return cellValue >= value;
case '<=':
return cellValue <= value;
case '==':
return cellValue === value;
case '!=':
return cellValue !== value;
default:
return false;
}
});
}
// Sorting
sort(column: string, ascending: boolean = true): DataFrame {
const sortedData = [...this.data].sort((a, b) => {
const aVal = a[column];
const bVal = b[column];
if (aVal === bVal) return 0;
const comparison = aVal > bVal ? 1 : -1;
return ascending ? comparison : -comparison;
});
return new DataFrame(sortedData, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes,
});
}
// Aggregation
groupBy(column: string): GroupByResult {
const groups: Record<string, DataFrameRow[]> = {};
for (const row of this.data) {
const key = String(row[column]);
if (!groups[key]) {
groups[key] = [];
}
groups[key].push(row);
}
const result: GroupByResult = {};
for (const [key, rows] of Object.entries(groups)) {
result[key] = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes,
});
}
return result;
}
agg(aggregations: Record<string, AggregationFunction>): DataFrameRow {
const result: DataFrameRow = {};
for (const [column, func] of Object.entries(aggregations)) {
if (!this._columns.includes(column)) {
throw new Error(`Column '${column}' not found`);
}
const values = this.getColumn(column).filter(val => val !== null && val !== undefined);
result[column] = func(values);
}
return result;
}
// Statistical methods
mean(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return values.reduce((sum, val) => sum + val, 0) / values.length;
}
sum(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return values.reduce((sum, val) => sum + val, 0);
}
min(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return Math.min(...values);
}
max(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
return Math.max(...values);
}
std(column: string): number {
const values = this.getColumn(column).filter(val => typeof val === 'number');
const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
const variance = values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / values.length;
return Math.sqrt(variance);
}
// Time series specific methods
resample(timeColumn: string, frequency: string): DataFrame {
// Simple resampling implementation
// For production, you'd want more sophisticated time-based grouping
const sorted = this.sort(timeColumn);
switch (frequency) {
case '1H':
return this.resampleByHour(sorted, timeColumn);
case '1D':
return this.resampleByDay(sorted, timeColumn);
default:
throw new Error(`Unsupported frequency: ${frequency}`);
}
}
private resampleByHour(sorted: DataFrame, timeColumn: string): DataFrame {
const groups: Record<string, DataFrameRow[]> = {};
for (const row of sorted.data) {
const date = new Date(row[timeColumn]);
const hourKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}-${date.getHours()}`;
if (!groups[hourKey]) {
groups[hourKey] = [];
}
groups[hourKey].push(row);
}
const aggregatedData: DataFrameRow[] = [];
for (const [key, rows] of Object.entries(groups)) {
const tempDf = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes,
});
// Create OHLCV aggregation
const aggregated: DataFrameRow = {
[timeColumn]: rows[0][timeColumn],
open: rows[0].close || rows[0].price,
high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'),
low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'),
close: rows[rows.length - 1].close || rows[rows.length - 1].price,
volume: tempDf.sum('volume') || 0,
};
aggregatedData.push(aggregated);
}
return new DataFrame(aggregatedData);
}
private resampleByDay(sorted: DataFrame, timeColumn: string): DataFrame {
// Similar to resampleByHour but group by day
const groups: Record<string, DataFrameRow[]> = {};
for (const row of sorted.data) {
const date = new Date(row[timeColumn]);
const dayKey = `${date.getFullYear()}-${date.getMonth()}-${date.getDate()}`;
if (!groups[dayKey]) {
groups[dayKey] = [];
}
groups[dayKey].push(row);
}
const aggregatedData: DataFrameRow[] = [];
for (const [key, rows] of Object.entries(groups)) {
const tempDf = new DataFrame(rows, {
columns: this._columns,
index: this._index,
dtypes: this._dtypes,
});
const aggregated: DataFrameRow = {
[timeColumn]: rows[0][timeColumn],
open: rows[0].close || rows[0].price,
high: tempDf.max('high') || tempDf.max('close') || tempDf.max('price'),
low: tempDf.min('low') || tempDf.min('close') || tempDf.min('price'),
close: rows[rows.length - 1].close || rows[rows.length - 1].price,
volume: tempDf.sum('volume') || 0,
};
aggregatedData.push(aggregated);
}
return new DataFrame(aggregatedData);
}
// Utility methods
copy(): DataFrame {
return new DataFrame(
this.data.map(row => ({ ...row })),
{
columns: this._columns,
index: this._index,
dtypes: { ...this._dtypes },
}
);
}
concat(other: DataFrame): DataFrame {
const combinedData = [...this.data, ...other.data];
const combinedColumns = Array.from(new Set([...this._columns, ...other._columns]));
return new DataFrame(combinedData, {
columns: combinedColumns,
index: this._index,
dtypes: { ...this._dtypes, ...other._dtypes },
});
}
toArray(): DataFrameRow[] {
return this.data.map(row => ({ ...row }));
}
toJSON(): string {
return JSON.stringify(this.data);
}
private filterDtypes(
columns: string[]
): Record<string, 'number' | 'string' | 'boolean' | 'date'> {
const filtered: Record<string, 'number' | 'string' | 'boolean' | 'date'> = {};
for (const col of columns) {
if (this._dtypes[col]) {
filtered[col] = this._dtypes[col];
}
}
return filtered;
}
// Display method
toString(): string {
if (this.empty) {
return 'Empty DataFrame';
}
const maxRows = 10;
const displayData = this.data.slice(0, maxRows);
let result = `DataFrame (${this.length} rows x ${this._columns.length} columns)\n`;
result += this._columns.join('\t') + '\n';
result += '-'.repeat(this._columns.join('\t').length) + '\n';
for (const row of displayData) {
const values = this._columns.map(col => String(row[col] ?? 'null'));
result += values.join('\t') + '\n';
}
if (this.length > maxRows) {
result += `... (${this.length - maxRows} more rows)\n`;
}
return result;
}
}
// Factory functions
export function createDataFrame(data: DataFrameRow[], options?: DataFrameOptions): DataFrame {
return new DataFrame(data, options);
}
export function readCSV(csvData: string, options?: DataFrameOptions): DataFrame {
const lines = csvData.trim().split('\n');
if (lines.length === 0) {
return new DataFrame();
}
const headers = lines[0].split(',').map(h => h.trim());
const data: DataFrameRow[] = [];
for (let i = 1; i < lines.length; i++) {
const values = lines[i].split(',').map(v => v.trim());
const row: DataFrameRow = {};
for (let j = 0; j < headers.length; j++) {
row[headers[j]] = values[j] || null;
}
data.push(row);
}
return new DataFrame(data, {
columns: headers,
...options,
});
}

File diff suppressed because it is too large Load diff

View file

@ -1,53 +1,56 @@
import axios, { type AxiosRequestConfig, type AxiosResponse } from 'axios';
import type { RequestConfig, HttpResponse } from '../types';
import type { RequestAdapter } from './types';
import { ProxyManager } from '../proxy-manager';
import { HttpError } from '../types';
/**
* Axios adapter for SOCKS proxies
*/
export class AxiosAdapter implements RequestAdapter {
canHandle(config: RequestConfig): boolean {
// Axios handles SOCKS proxies
return Boolean(config.proxy && (config.proxy.protocol === 'socks4' || config.proxy.protocol === 'socks5'));
}
async request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>> {
const { url, method = 'GET', headers, data, proxy } = config;
if (!proxy) {
throw new Error('Axios adapter requires proxy configuration');
}
// Create proxy configuration using ProxyManager
const axiosConfig: AxiosRequestConfig = {
...ProxyManager.createAxiosConfig(proxy),
url,
method,
headers,
data,
signal,
// Don't throw on non-2xx status codes - let caller handle
validateStatus: () => true,
}; const response: AxiosResponse<T> = await axios(axiosConfig);
const httpResponse: HttpResponse<T> = {
data: response.data,
status: response.status,
headers: response.headers as Record<string, string>,
ok: response.status >= 200 && response.status < 300,
};
// Throw HttpError for non-2xx status codes
if (!httpResponse.ok) {
throw new HttpError(
`Request failed with status ${response.status}`,
response.status,
httpResponse
);
}
return httpResponse;
}
}
import axios, { type AxiosRequestConfig, type AxiosResponse } from 'axios';
import { ProxyManager } from '../proxy-manager';
import type { HttpResponse, RequestConfig } from '../types';
import { HttpError } from '../types';
import type { RequestAdapter } from './types';
/**
* Axios adapter for SOCKS proxies
*/
export class AxiosAdapter implements RequestAdapter {
canHandle(config: RequestConfig): boolean {
// Axios handles SOCKS proxies
return Boolean(
config.proxy && (config.proxy.protocol === 'socks4' || config.proxy.protocol === 'socks5')
);
}
async request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>> {
const { url, method = 'GET', headers, data, proxy } = config;
if (!proxy) {
throw new Error('Axios adapter requires proxy configuration');
}
// Create proxy configuration using ProxyManager
const axiosConfig: AxiosRequestConfig = {
...ProxyManager.createAxiosConfig(proxy),
url,
method,
headers,
data,
signal,
// Don't throw on non-2xx status codes - let caller handle
validateStatus: () => true,
};
const response: AxiosResponse<T> = await axios(axiosConfig);
const httpResponse: HttpResponse<T> = {
data: response.data,
status: response.status,
headers: response.headers as Record<string, string>,
ok: response.status >= 200 && response.status < 300,
};
// Throw HttpError for non-2xx status codes
if (!httpResponse.ok) {
throw new HttpError(
`Request failed with status ${response.status}`,
response.status,
httpResponse
);
}
return httpResponse;
}
}

View file

@ -1,28 +1,28 @@
import type { RequestConfig } from '../types';
import type { RequestAdapter } from './types';
import { FetchAdapter } from './fetch-adapter';
import { AxiosAdapter } from './axios-adapter';
/**
* Factory for creating the appropriate request adapter
*/
export class AdapterFactory {
private static adapters: RequestAdapter[] = [
new AxiosAdapter(), // Check SOCKS first
new FetchAdapter(), // Fallback to fetch for everything else
];
/**
* Get the appropriate adapter for the given configuration
*/
static getAdapter(config: RequestConfig): RequestAdapter {
for (const adapter of this.adapters) {
if (adapter.canHandle(config)) {
return adapter;
}
}
// Fallback to fetch adapter
return new FetchAdapter();
}
}
import type { RequestConfig } from '../types';
import { AxiosAdapter } from './axios-adapter';
import { FetchAdapter } from './fetch-adapter';
import type { RequestAdapter } from './types';
/**
* Factory for creating the appropriate request adapter
*/
export class AdapterFactory {
private static adapters: RequestAdapter[] = [
new AxiosAdapter(), // Check SOCKS first
new FetchAdapter(), // Fallback to fetch for everything else
];
/**
* Get the appropriate adapter for the given configuration
*/
static getAdapter(config: RequestConfig): RequestAdapter {
for (const adapter of this.adapters) {
if (adapter.canHandle(config)) {
return adapter;
}
}
// Fallback to fetch adapter
return new FetchAdapter();
}
}

View file

@ -1,66 +1,67 @@
import type { RequestConfig, HttpResponse } from '../types';
import type { RequestAdapter } from './types';
import { ProxyManager } from '../proxy-manager';
import { HttpError } from '../types';
/**
* Fetch adapter for HTTP/HTTPS proxies and non-proxy requests
*/
export class FetchAdapter implements RequestAdapter {
canHandle(config: RequestConfig): boolean {
// Fetch handles non-proxy requests and HTTP/HTTPS proxies
return !config.proxy || config.proxy.protocol === 'http' || config.proxy.protocol === 'https';
}
async request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>> {
const { url, method = 'GET', headers, data, proxy } = config;
// Prepare fetch options
const fetchOptions: RequestInit = {
method,
headers,
signal,
};
// Add body for non-GET requests
if (data && method !== 'GET') {
fetchOptions.body = typeof data === 'string' ? data : JSON.stringify(data);
if (typeof data === 'object') {
fetchOptions.headers = { 'Content-Type': 'application/json', ...fetchOptions.headers };
}
}
// Add proxy if needed (using Bun's built-in proxy support)
if (proxy) {
(fetchOptions as any).proxy = ProxyManager.createProxyUrl(proxy);
} const response = await fetch(url, fetchOptions);
// Parse response based on content type
let responseData: T;
const contentType = response.headers.get('content-type') || '';
if (contentType.includes('application/json')) {
responseData = await response.json() as T;
} else {
responseData = await response.text() as T;
}
const httpResponse: HttpResponse<T> = {
data: responseData,
status: response.status,
headers: Object.fromEntries(response.headers.entries()),
ok: response.ok,
};
// Throw HttpError for non-2xx status codes
if (!response.ok) {
throw new HttpError(
`Request failed with status ${response.status}`,
response.status,
httpResponse
);
}
return httpResponse;
}
}
import { ProxyManager } from '../proxy-manager';
import type { HttpResponse, RequestConfig } from '../types';
import { HttpError } from '../types';
import type { RequestAdapter } from './types';
/**
* Fetch adapter for HTTP/HTTPS proxies and non-proxy requests
*/
export class FetchAdapter implements RequestAdapter {
canHandle(config: RequestConfig): boolean {
// Fetch handles non-proxy requests and HTTP/HTTPS proxies
return !config.proxy || config.proxy.protocol === 'http' || config.proxy.protocol === 'https';
}
async request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>> {
const { url, method = 'GET', headers, data, proxy } = config;
// Prepare fetch options
const fetchOptions: RequestInit = {
method,
headers,
signal,
};
// Add body for non-GET requests
if (data && method !== 'GET') {
fetchOptions.body = typeof data === 'string' ? data : JSON.stringify(data);
if (typeof data === 'object') {
fetchOptions.headers = { 'Content-Type': 'application/json', ...fetchOptions.headers };
}
}
// Add proxy if needed (using Bun's built-in proxy support)
if (proxy) {
(fetchOptions as any).proxy = ProxyManager.createProxyUrl(proxy);
}
const response = await fetch(url, fetchOptions);
// Parse response based on content type
let responseData: T;
const contentType = response.headers.get('content-type') || '';
if (contentType.includes('application/json')) {
responseData = (await response.json()) as T;
} else {
responseData = (await response.text()) as T;
}
const httpResponse: HttpResponse<T> = {
data: responseData,
status: response.status,
headers: Object.fromEntries(response.headers.entries()),
ok: response.ok,
};
// Throw HttpError for non-2xx status codes
if (!response.ok) {
throw new HttpError(
`Request failed with status ${response.status}`,
response.status,
httpResponse
);
}
return httpResponse;
}
}

View file

@ -1,4 +1,4 @@
export * from './types';
export * from './fetch-adapter';
export * from './axios-adapter';
export * from './factory';
export * from './types';
export * from './fetch-adapter';
export * from './axios-adapter';
export * from './factory';

View file

@ -1,16 +1,16 @@
import type { RequestConfig, HttpResponse } from '../types';
/**
* Request adapter interface for different HTTP implementations
*/
export interface RequestAdapter {
/**
* Execute an HTTP request
*/
request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>>;
/**
* Check if this adapter can handle the given configuration
*/
canHandle(config: RequestConfig): boolean;
}
import type { HttpResponse, RequestConfig } from '../types';
/**
* Request adapter interface for different HTTP implementations
*/
export interface RequestAdapter {
/**
* Execute an HTTP request
*/
request<T = any>(config: RequestConfig, signal: AbortSignal): Promise<HttpResponse<T>>;
/**
* Check if this adapter can handle the given configuration
*/
canHandle(config: RequestConfig): boolean;
}

View file

@ -1,155 +1,175 @@
import type { Logger } from '@stock-bot/logger';
import type {
HttpClientConfig,
RequestConfig,
HttpResponse,
} from './types';
import { HttpError } from './types';
import { ProxyManager } from './proxy-manager';
import { AdapterFactory } from './adapters/index';
export class HttpClient {
private readonly config: HttpClientConfig;
private readonly logger?: Logger;
constructor(config: HttpClientConfig = {}, logger?: Logger) {
this.config = config;
this.logger = logger?.child('http-client');
}
// Convenience methods
async get<T = any>(url: string, config: Omit<RequestConfig, 'method' | 'url'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'GET', url });
}
async post<T = any>(url: string, data?: any, config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'POST', url, data });
}
async put<T = any>(url: string, data?: any, config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'PUT', url, data });
}
async del<T = any>(url: string, config: Omit<RequestConfig, 'method' | 'url'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'DELETE', url });
}
async patch<T = any>(url: string, data?: any, config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'PATCH', url, data });
}
/**
* Main request method - clean and simple
*/
async request<T = any>(config: RequestConfig): Promise<HttpResponse<T>> {
const finalConfig = this.mergeConfig(config);
const startTime = Date.now();
this.logger?.debug('Making HTTP request', {
method: finalConfig.method,
url: finalConfig.url,
hasProxy: !!finalConfig.proxy
});
try {
const response = await this.executeRequest<T>(finalConfig);
response.responseTime = Date.now() - startTime;
this.logger?.debug('HTTP request successful', {
method: finalConfig.method,
url: finalConfig.url,
status: response.status,
responseTime: response.responseTime,
});
return response;
} catch (error) {
if( this.logger?.getServiceName() === 'proxy-tasks' ) {
this.logger?.debug('HTTP request failed', {
method: finalConfig.method,
url: finalConfig.url,
error: (error as Error).message,
});
}else{
this.logger?.warn('HTTP request failed', {
method: finalConfig.method,
url: finalConfig.url,
error: (error as Error).message,
});
}
throw error;
}
}
/**
* Execute request with timeout handling - no race conditions
*/ private async executeRequest<T>(config: RequestConfig): Promise<HttpResponse<T>> {
const timeout = config.timeout ?? this.config.timeout ?? 30000;
const controller = new AbortController();
const startTime = Date.now();
let timeoutId: NodeJS.Timeout | undefined;
// Set up timeout
// Create a timeout promise that will reject
const timeoutPromise = new Promise<never>((_, reject) => {
timeoutId = setTimeout(() => {
const elapsed = Date.now() - startTime;
this.logger?.debug('Request timeout triggered', {
url: config.url,
method: config.method,
timeout,
elapsed
});
// Attempt to abort (may or may not work with Bun)
controller.abort();
// Force rejection regardless of signal behavior
reject(new HttpError(`Request timeout after ${timeout}ms (elapsed: ${elapsed}ms)`));
}, timeout);
});
try {
// Get the appropriate adapter
const adapter = AdapterFactory.getAdapter(config);
const response = await Promise.race([
adapter.request<T>(config, controller.signal),
timeoutPromise
]);
this.logger?.debug('Adapter request successful', { url: config.url, elapsedMs: Date.now() - startTime });
// Clear timeout on success
clearTimeout(timeoutId);
return response;
} catch (error) {
const elapsed = Date.now() - startTime;
this.logger?.debug('Adapter failed successful', { url: config.url, elapsedMs: Date.now() - startTime });
clearTimeout(timeoutId);
// Handle timeout
if (controller.signal.aborted) {
throw new HttpError(`Request timeout after ${timeout}ms`);
}
// Re-throw other errors
if (error instanceof HttpError) {
throw error;
}
throw new HttpError(`Request failed: ${(error as Error).message}`);
}
}
/**
* Merge configs with defaults
*/
private mergeConfig(config: RequestConfig): RequestConfig {
return {
...config,
headers: { ...this.config.headers, ...config.headers },
timeout: config.timeout ?? this.config.timeout,
};
}
}
import type { Logger } from '@stock-bot/logger';
import { AdapterFactory } from './adapters/index';
import { ProxyManager } from './proxy-manager';
import type { HttpClientConfig, HttpResponse, RequestConfig } from './types';
import { HttpError } from './types';
export class HttpClient {
private readonly config: HttpClientConfig;
private readonly logger?: Logger;
constructor(config: HttpClientConfig = {}, logger?: Logger) {
this.config = config;
this.logger = logger?.child('http-client');
}
// Convenience methods
async get<T = any>(
url: string,
config: Omit<RequestConfig, 'method' | 'url'> = {}
): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'GET', url });
}
async post<T = any>(
url: string,
data?: any,
config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}
): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'POST', url, data });
}
async put<T = any>(
url: string,
data?: any,
config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}
): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'PUT', url, data });
}
async del<T = any>(
url: string,
config: Omit<RequestConfig, 'method' | 'url'> = {}
): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'DELETE', url });
}
async patch<T = any>(
url: string,
data?: any,
config: Omit<RequestConfig, 'method' | 'url' | 'data'> = {}
): Promise<HttpResponse<T>> {
return this.request<T>({ ...config, method: 'PATCH', url, data });
}
/**
* Main request method - clean and simple
*/
async request<T = any>(config: RequestConfig): Promise<HttpResponse<T>> {
const finalConfig = this.mergeConfig(config);
const startTime = Date.now();
this.logger?.debug('Making HTTP request', {
method: finalConfig.method,
url: finalConfig.url,
hasProxy: !!finalConfig.proxy,
});
try {
const response = await this.executeRequest<T>(finalConfig);
response.responseTime = Date.now() - startTime;
this.logger?.debug('HTTP request successful', {
method: finalConfig.method,
url: finalConfig.url,
status: response.status,
responseTime: response.responseTime,
});
return response;
} catch (error) {
if (this.logger?.getServiceName() === 'proxy-tasks') {
this.logger?.debug('HTTP request failed', {
method: finalConfig.method,
url: finalConfig.url,
error: (error as Error).message,
});
} else {
this.logger?.warn('HTTP request failed', {
method: finalConfig.method,
url: finalConfig.url,
error: (error as Error).message,
});
}
throw error;
}
}
/**
* Execute request with timeout handling - no race conditions
*/ private async executeRequest<T>(config: RequestConfig): Promise<HttpResponse<T>> {
const timeout = config.timeout ?? this.config.timeout ?? 30000;
const controller = new AbortController();
const startTime = Date.now();
let timeoutId: NodeJS.Timeout | undefined;
// Set up timeout
// Create a timeout promise that will reject
const timeoutPromise = new Promise<never>((_, reject) => {
timeoutId = setTimeout(() => {
const elapsed = Date.now() - startTime;
this.logger?.debug('Request timeout triggered', {
url: config.url,
method: config.method,
timeout,
elapsed,
});
// Attempt to abort (may or may not work with Bun)
controller.abort();
// Force rejection regardless of signal behavior
reject(new HttpError(`Request timeout after ${timeout}ms (elapsed: ${elapsed}ms)`));
}, timeout);
});
try {
// Get the appropriate adapter
const adapter = AdapterFactory.getAdapter(config);
const response = await Promise.race([
adapter.request<T>(config, controller.signal),
timeoutPromise,
]);
this.logger?.debug('Adapter request successful', {
url: config.url,
elapsedMs: Date.now() - startTime,
});
// Clear timeout on success
clearTimeout(timeoutId);
return response;
} catch (error) {
const elapsed = Date.now() - startTime;
this.logger?.debug('Adapter failed successful', {
url: config.url,
elapsedMs: Date.now() - startTime,
});
clearTimeout(timeoutId);
// Handle timeout
if (controller.signal.aborted) {
throw new HttpError(`Request timeout after ${timeout}ms`);
}
// Re-throw other errors
if (error instanceof HttpError) {
throw error;
}
throw new HttpError(`Request failed: ${(error as Error).message}`);
}
}
/**
* Merge configs with defaults
*/
private mergeConfig(config: RequestConfig): RequestConfig {
return {
...config,
headers: { ...this.config.headers, ...config.headers },
timeout: config.timeout ?? this.config.timeout,
};
}
}

View file

@ -1,8 +1,8 @@
// Re-export all types and classes
export * from './types';
export * from './client';
export * from './proxy-manager';
export * from './adapters/index';
// Default export
export { HttpClient as default } from './client';
// Re-export all types and classes
export * from './types';
export * from './client';
export * from './proxy-manager';
export * from './adapters/index';
// Default export
export { HttpClient as default } from './client';

View file

@ -1,66 +1,66 @@
import axios, { AxiosRequestConfig, type AxiosInstance } from 'axios';
import { SocksProxyAgent } from 'socks-proxy-agent';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { HttpProxyAgent } from 'http-proxy-agent';
import type { ProxyInfo } from './types';
export class ProxyManager {
/**
* Determine if we should use Bun fetch (HTTP/HTTPS) or Axios (SOCKS)
*/
static shouldUseBunFetch(proxy: ProxyInfo): boolean {
return proxy.protocol === 'http' || proxy.protocol === 'https';
}
/**
* Create proxy URL for both Bun fetch and Axios proxy agents
*/
static createProxyUrl(proxy: ProxyInfo): string {
const { protocol, host, port, username, password } = proxy;
if (username && password) {
return `${protocol}://${encodeURIComponent(username)}:${encodeURIComponent(password)}@${host}:${port}`;
}
return `${protocol}://${host}:${port}`;
}
/**
* Create appropriate agent for Axios based on proxy type
*/
static createProxyAgent(proxy: ProxyInfo) {
this.validateConfig(proxy);
const proxyUrl = this.createProxyUrl(proxy);
switch (proxy.protocol) {
case 'socks4':
case 'socks5':
// console.log(`Using SOCKS proxy: ${proxyUrl}`);
return new SocksProxyAgent(proxyUrl);
case 'http':
return new HttpProxyAgent(proxyUrl);
case 'https':
return new HttpsProxyAgent(proxyUrl);
default:
throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`);
}
}
/**
* Create Axios instance with proxy configuration
*/
static createAxiosConfig(proxy: ProxyInfo): AxiosRequestConfig {
const agent = this.createProxyAgent(proxy);
return {
httpAgent: agent,
httpsAgent: agent,
};
}
/**
* Simple proxy config validation
*/
static validateConfig(proxy: ProxyInfo): void {
if (!proxy.host || !proxy.port) {
throw new Error('Proxy host and port are required');
}
if (!['http', 'https', 'socks4', 'socks5'].includes(proxy.protocol)) {
throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`);
}
}
}
import axios, { AxiosRequestConfig, type AxiosInstance } from 'axios';
import { HttpProxyAgent } from 'http-proxy-agent';
import { HttpsProxyAgent } from 'https-proxy-agent';
import { SocksProxyAgent } from 'socks-proxy-agent';
import type { ProxyInfo } from './types';
export class ProxyManager {
/**
* Determine if we should use Bun fetch (HTTP/HTTPS) or Axios (SOCKS)
*/
static shouldUseBunFetch(proxy: ProxyInfo): boolean {
return proxy.protocol === 'http' || proxy.protocol === 'https';
}
/**
* Create proxy URL for both Bun fetch and Axios proxy agents
*/
static createProxyUrl(proxy: ProxyInfo): string {
const { protocol, host, port, username, password } = proxy;
if (username && password) {
return `${protocol}://${encodeURIComponent(username)}:${encodeURIComponent(password)}@${host}:${port}`;
}
return `${protocol}://${host}:${port}`;
}
/**
* Create appropriate agent for Axios based on proxy type
*/
static createProxyAgent(proxy: ProxyInfo) {
this.validateConfig(proxy);
const proxyUrl = this.createProxyUrl(proxy);
switch (proxy.protocol) {
case 'socks4':
case 'socks5':
// console.log(`Using SOCKS proxy: ${proxyUrl}`);
return new SocksProxyAgent(proxyUrl);
case 'http':
return new HttpProxyAgent(proxyUrl);
case 'https':
return new HttpsProxyAgent(proxyUrl);
default:
throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`);
}
}
/**
* Create Axios instance with proxy configuration
*/
static createAxiosConfig(proxy: ProxyInfo): AxiosRequestConfig {
const agent = this.createProxyAgent(proxy);
return {
httpAgent: agent,
httpsAgent: agent,
};
}
/**
* Simple proxy config validation
*/
static validateConfig(proxy: ProxyInfo): void {
if (!proxy.host || !proxy.port) {
throw new Error('Proxy host and port are required');
}
if (!['http', 'https', 'socks4', 'socks5'].includes(proxy.protocol)) {
throw new Error(`Unsupported proxy protocol: ${proxy.protocol}`);
}
}
}

View file

@ -1,55 +1,55 @@
// Minimal types for fast HTTP client
export type HttpMethod = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH';
export interface ProxyInfo {
source?: string;
protocol: 'http' | 'https' | 'socks4' | 'socks5';
host: string;
port: number;
username?: string;
password?: string;
url?: string; // Full proxy URL for adapters
isWorking?: boolean;
responseTime?: number;
error?: string;
// Enhanced tracking properties
working?: number; // Number of successful checks
total?: number; // Total number of checks
successRate?: number; // Success rate percentage
averageResponseTime?: number; // Average response time in milliseconds
firstSeen?: Date; // When the proxy was first added to cache
lastChecked?: Date; // When the proxy was last checked
}
export interface HttpClientConfig {
timeout?: number;
headers?: Record<string, string>;
}
export interface RequestConfig {
method?: HttpMethod;
url: string;
headers?: Record<string, string>;
data?: any; // Changed from 'body' to 'data' for consistency
timeout?: number;
proxy?: ProxyInfo;
}
export interface HttpResponse<T = any> {
data: T;
status: number;
headers: Record<string, string>;
ok: boolean;
responseTime?: number;
}
export class HttpError extends Error {
constructor(
message: string,
public status?: number,
public response?: HttpResponse
) {
super(message);
this.name = 'HttpError';
}
}
// Minimal types for fast HTTP client
export type HttpMethod = 'GET' | 'POST' | 'PUT' | 'DELETE' | 'PATCH';
export interface ProxyInfo {
source?: string;
protocol: 'http' | 'https' | 'socks4' | 'socks5';
host: string;
port: number;
username?: string;
password?: string;
url?: string; // Full proxy URL for adapters
isWorking?: boolean;
responseTime?: number;
error?: string;
// Enhanced tracking properties
working?: number; // Number of successful checks
total?: number; // Total number of checks
successRate?: number; // Success rate percentage
averageResponseTime?: number; // Average response time in milliseconds
firstSeen?: Date; // When the proxy was first added to cache
lastChecked?: Date; // When the proxy was last checked
}
export interface HttpClientConfig {
timeout?: number;
headers?: Record<string, string>;
}
export interface RequestConfig {
method?: HttpMethod;
url: string;
headers?: Record<string, string>;
data?: any; // Changed from 'body' to 'data' for consistency
timeout?: number;
proxy?: ProxyInfo;
}
export interface HttpResponse<T = any> {
data: T;
status: number;
headers: Record<string, string>;
ok: boolean;
responseTime?: number;
}
export class HttpError extends Error {
constructor(
message: string,
public status?: number,
public response?: HttpResponse
) {
super(message);
this.name = 'HttpError';
}
}

View file

@ -1,154 +1,161 @@
import { describe, test, expect, beforeAll, afterAll } from 'bun:test';
import { HttpClient, HttpError } from '../src/index';
import { MockServer } from './mock-server';
/**
* Integration tests for HTTP client with real network scenarios
* These tests use external services and may be affected by network conditions
*/
let mockServer: MockServer;
let mockServerBaseUrl: string;
beforeAll(async () => {
mockServer = new MockServer();
await mockServer.start();
mockServerBaseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
await mockServer.stop();
});
describe('HTTP Integration Tests', () => {
let client: HttpClient;
beforeAll(() => {
client = new HttpClient({
timeout: 10000
});
});
describe('Real-world scenarios', () => {
test('should handle JSON API responses', async () => {
try {
const response = await client.get('https://jsonplaceholder.typicode.com/posts/1');
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('id');
expect(response.data).toHaveProperty('title');
expect(response.data).toHaveProperty('body');
} catch (error) {
console.warn('External API test skipped due to network issues:', (error as Error).message);
}
});
test('should handle large responses', async () => {
try {
const response = await client.get('https://jsonplaceholder.typicode.com/posts');
expect(response.status).toBe(200);
expect(Array.isArray(response.data)).toBe(true);
expect(response.data.length).toBeGreaterThan(0);
} catch (error) {
console.warn('Large response test skipped due to network issues:', (error as Error).message);
}
});
test('should handle POST with JSON data', async () => {
try {
const postData = {
title: 'Integration Test Post',
body: 'This is a test post from integration tests',
userId: 1
};
const response = await client.post('https://jsonplaceholder.typicode.com/posts', postData);
expect(response.status).toBe(201);
expect(response.data).toHaveProperty('id');
expect(response.data.title).toBe(postData.title);
} catch (error) {
console.warn('POST integration test skipped due to network issues:', (error as Error).message);
}
});
});
describe('Error scenarios with mock server', () => { test('should handle various HTTP status codes', async () => {
const successCodes = [200, 201];
const errorCodes = [400, 401, 403, 404, 500, 503];
// Test success codes
for (const statusCode of successCodes) {
const response = await client.get(`${mockServerBaseUrl}/status/${statusCode}`);
expect(response.status).toBe(statusCode);
}
// Test error codes (should throw HttpError)
for (const statusCode of errorCodes) {
await expect(
client.get(`${mockServerBaseUrl}/status/${statusCode}`)
).rejects.toThrow(HttpError);
}
});
test('should handle malformed responses gracefully', async () => {
// Mock server returns valid JSON, so this test verifies our client handles it properly
const response = await client.get(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(typeof response.data).toBe('object');
});
test('should handle concurrent requests', async () => {
const requests = Array.from({ length: 5 }, (_, i) =>
client.get(`${mockServerBaseUrl}/`, {
headers: { 'X-Request-ID': `req-${i}` }
})
);
const responses = await Promise.all(requests);
responses.forEach((response, index) => {
expect(response.status).toBe(200);
expect(response.data.headers).toHaveProperty('x-request-id', `req-${index}`);
});
});
});
describe('Performance and reliability', () => {
test('should handle rapid sequential requests', async () => {
const startTime = Date.now();
const requests = [];
for (let i = 0; i < 10; i++) {
requests.push(client.get(`${mockServerBaseUrl}/`));
}
const responses = await Promise.all(requests);
const endTime = Date.now();
expect(responses).toHaveLength(10);
responses.forEach(response => {
expect(response.status).toBe(200);
});
console.log(`Completed 10 requests in ${endTime - startTime}ms`);
});
test('should maintain connection efficiency', async () => {
const clientWithKeepAlive = new HttpClient({
timeout: 5000
});
const requests = Array.from({ length: 3 }, () =>
clientWithKeepAlive.get(`${mockServerBaseUrl}/`)
);
const responses = await Promise.all(requests);
responses.forEach(response => {
expect(response.status).toBe(200);
});
});
});
});
import { afterAll, beforeAll, describe, expect, test } from 'bun:test';
import { HttpClient, HttpError } from '../src/index';
import { MockServer } from './mock-server';
/**
* Integration tests for HTTP client with real network scenarios
* These tests use external services and may be affected by network conditions
*/
let mockServer: MockServer;
let mockServerBaseUrl: string;
beforeAll(async () => {
mockServer = new MockServer();
await mockServer.start();
mockServerBaseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
await mockServer.stop();
});
describe('HTTP Integration Tests', () => {
let client: HttpClient;
beforeAll(() => {
client = new HttpClient({
timeout: 10000,
});
});
describe('Real-world scenarios', () => {
test('should handle JSON API responses', async () => {
try {
const response = await client.get('https://jsonplaceholder.typicode.com/posts/1');
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('id');
expect(response.data).toHaveProperty('title');
expect(response.data).toHaveProperty('body');
} catch (error) {
console.warn('External API test skipped due to network issues:', (error as Error).message);
}
});
test('should handle large responses', async () => {
try {
const response = await client.get('https://jsonplaceholder.typicode.com/posts');
expect(response.status).toBe(200);
expect(Array.isArray(response.data)).toBe(true);
expect(response.data.length).toBeGreaterThan(0);
} catch (error) {
console.warn(
'Large response test skipped due to network issues:',
(error as Error).message
);
}
});
test('should handle POST with JSON data', async () => {
try {
const postData = {
title: 'Integration Test Post',
body: 'This is a test post from integration tests',
userId: 1,
};
const response = await client.post('https://jsonplaceholder.typicode.com/posts', postData);
expect(response.status).toBe(201);
expect(response.data).toHaveProperty('id');
expect(response.data.title).toBe(postData.title);
} catch (error) {
console.warn(
'POST integration test skipped due to network issues:',
(error as Error).message
);
}
});
});
describe('Error scenarios with mock server', () => {
test('should handle various HTTP status codes', async () => {
const successCodes = [200, 201];
const errorCodes = [400, 401, 403, 404, 500, 503];
// Test success codes
for (const statusCode of successCodes) {
const response = await client.get(`${mockServerBaseUrl}/status/${statusCode}`);
expect(response.status).toBe(statusCode);
}
// Test error codes (should throw HttpError)
for (const statusCode of errorCodes) {
await expect(client.get(`${mockServerBaseUrl}/status/${statusCode}`)).rejects.toThrow(
HttpError
);
}
});
test('should handle malformed responses gracefully', async () => {
// Mock server returns valid JSON, so this test verifies our client handles it properly
const response = await client.get(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(typeof response.data).toBe('object');
});
test('should handle concurrent requests', async () => {
const requests = Array.from({ length: 5 }, (_, i) =>
client.get(`${mockServerBaseUrl}/`, {
headers: { 'X-Request-ID': `req-${i}` },
})
);
const responses = await Promise.all(requests);
responses.forEach((response, index) => {
expect(response.status).toBe(200);
expect(response.data.headers).toHaveProperty('x-request-id', `req-${index}`);
});
});
});
describe('Performance and reliability', () => {
test('should handle rapid sequential requests', async () => {
const startTime = Date.now();
const requests = [];
for (let i = 0; i < 10; i++) {
requests.push(client.get(`${mockServerBaseUrl}/`));
}
const responses = await Promise.all(requests);
const endTime = Date.now();
expect(responses).toHaveLength(10);
responses.forEach(response => {
expect(response.status).toBe(200);
});
console.log(`Completed 10 requests in ${endTime - startTime}ms`);
});
test('should maintain connection efficiency', async () => {
const clientWithKeepAlive = new HttpClient({
timeout: 5000,
});
const requests = Array.from({ length: 3 }, () =>
clientWithKeepAlive.get(`${mockServerBaseUrl}/`)
);
const responses = await Promise.all(requests);
responses.forEach(response => {
expect(response.status).toBe(200);
});
});
});
});

View file

@ -1,159 +1,155 @@
import { describe, test, expect, beforeEach, beforeAll, afterAll } from 'bun:test';
import { HttpClient, HttpError, ProxyManager } from '../src/index';
import type { ProxyInfo } from '../src/types';
import { MockServer } from './mock-server';
// Global mock server instance
let mockServer: MockServer;
let mockServerBaseUrl: string;
beforeAll(async () => {
// Start mock server for all tests
mockServer = new MockServer();
await mockServer.start();
mockServerBaseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
// Stop mock server
await mockServer.stop();
});
describe('HttpClient', () => {
let client: HttpClient;
beforeEach(() => {
client = new HttpClient();
});
describe('Basic functionality', () => {
test('should create client with default config', () => {
expect(client).toBeInstanceOf(HttpClient);
});
test('should make GET request', async () => {
const response = await client.get(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('url');
expect(response.data).toHaveProperty('method', 'GET');
});
test('should make POST request with body', async () => {
const testData = {
title: 'Test Post',
body: 'Test body',
userId: 1,
};
const response = await client.post(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('data');
expect(response.data.data).toEqual(testData);
});
test('should handle custom headers', async () => {
const customHeaders = {
'X-Custom-Header': 'test-value',
'User-Agent': 'StockBot-HTTP-Client/1.0'
};
const response = await client.get(`${mockServerBaseUrl}/headers`, {
headers: customHeaders
});
expect(response.status).toBe(200);
expect(response.data.headers).toHaveProperty('x-custom-header', 'test-value');
expect(response.data.headers).toHaveProperty('user-agent', 'StockBot-HTTP-Client/1.0');
});
test('should handle timeout', async () => {
const clientWithTimeout = new HttpClient({ timeout: 1 }); // 1ms timeout
await expect(
clientWithTimeout.get('https://httpbin.org/delay/1')
).rejects.toThrow();
});
});
describe('Error handling', () => {
test('should handle HTTP errors', async () => {
await expect(
client.get(`${mockServerBaseUrl}/status/404`)
).rejects.toThrow(HttpError);
});
test('should handle network errors gracefully', async () => {
await expect(
client.get('https://nonexistent-domain-that-will-fail-12345.test')
).rejects.toThrow();
});
test('should handle invalid URLs', async () => {
await expect(
client.get('not:/a:valid/url')
).rejects.toThrow();
});
});
describe('HTTP methods', () => {
test('should make PUT request', async () => {
const testData = { id: 1, name: 'Updated' };
const response = await client.put(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
});
test('should make DELETE request', async () => {
const response = await client.del(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(response.data.method).toBe('DELETE');
});
test('should make PATCH request', async () => {
const testData = { name: 'Patched' };
const response = await client.patch(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
});
});
});
describe('ProxyManager', () => {
test('should determine when to use Bun fetch', () => {
const httpProxy: ProxyInfo = {
protocol: 'http',
host: 'proxy.example.com',
port: 8080
};
const socksProxy: ProxyInfo = {
protocol: 'socks5',
host: 'proxy.example.com',
port: 1080
};
expect(ProxyManager.shouldUseBunFetch(httpProxy)).toBe(true);
expect(ProxyManager.shouldUseBunFetch(socksProxy)).toBe(false);
});
test('should create proxy URL for Bun fetch', () => {
const proxy: ProxyInfo = {
protocol: 'http',
host: 'proxy.example.com',
port: 8080,
username: 'user',
password: 'pass' };
const proxyUrl = ProxyManager.createProxyUrl(proxy);
expect(proxyUrl).toBe('http://user:pass@proxy.example.com:8080');
});
test('should create proxy URL without credentials', () => {
const proxy: ProxyInfo = {
protocol: 'https',
host: 'proxy.example.com',
port: 8080 };
const proxyUrl = ProxyManager.createProxyUrl(proxy);
expect(proxyUrl).toBe('https://proxy.example.com:8080');
});
});
import { afterAll, beforeAll, beforeEach, describe, expect, test } from 'bun:test';
import { HttpClient, HttpError, ProxyManager } from '../src/index';
import type { ProxyInfo } from '../src/types';
import { MockServer } from './mock-server';
// Global mock server instance
let mockServer: MockServer;
let mockServerBaseUrl: string;
beforeAll(async () => {
// Start mock server for all tests
mockServer = new MockServer();
await mockServer.start();
mockServerBaseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
// Stop mock server
await mockServer.stop();
});
describe('HttpClient', () => {
let client: HttpClient;
beforeEach(() => {
client = new HttpClient();
});
describe('Basic functionality', () => {
test('should create client with default config', () => {
expect(client).toBeInstanceOf(HttpClient);
});
test('should make GET request', async () => {
const response = await client.get(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('url');
expect(response.data).toHaveProperty('method', 'GET');
});
test('should make POST request with body', async () => {
const testData = {
title: 'Test Post',
body: 'Test body',
userId: 1,
};
const response = await client.post(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
expect(response.data).toHaveProperty('data');
expect(response.data.data).toEqual(testData);
});
test('should handle custom headers', async () => {
const customHeaders = {
'X-Custom-Header': 'test-value',
'User-Agent': 'StockBot-HTTP-Client/1.0',
};
const response = await client.get(`${mockServerBaseUrl}/headers`, {
headers: customHeaders,
});
expect(response.status).toBe(200);
expect(response.data.headers).toHaveProperty('x-custom-header', 'test-value');
expect(response.data.headers).toHaveProperty('user-agent', 'StockBot-HTTP-Client/1.0');
});
test('should handle timeout', async () => {
const clientWithTimeout = new HttpClient({ timeout: 1 }); // 1ms timeout
await expect(clientWithTimeout.get('https://httpbin.org/delay/1')).rejects.toThrow();
});
});
describe('Error handling', () => {
test('should handle HTTP errors', async () => {
await expect(client.get(`${mockServerBaseUrl}/status/404`)).rejects.toThrow(HttpError);
});
test('should handle network errors gracefully', async () => {
await expect(
client.get('https://nonexistent-domain-that-will-fail-12345.test')
).rejects.toThrow();
});
test('should handle invalid URLs', async () => {
await expect(client.get('not:/a:valid/url')).rejects.toThrow();
});
});
describe('HTTP methods', () => {
test('should make PUT request', async () => {
const testData = { id: 1, name: 'Updated' };
const response = await client.put(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
});
test('should make DELETE request', async () => {
const response = await client.del(`${mockServerBaseUrl}/`);
expect(response.status).toBe(200);
expect(response.data.method).toBe('DELETE');
});
test('should make PATCH request', async () => {
const testData = { name: 'Patched' };
const response = await client.patch(`${mockServerBaseUrl}/post`, testData);
expect(response.status).toBe(200);
});
});
});
describe('ProxyManager', () => {
test('should determine when to use Bun fetch', () => {
const httpProxy: ProxyInfo = {
protocol: 'http',
host: 'proxy.example.com',
port: 8080,
};
const socksProxy: ProxyInfo = {
protocol: 'socks5',
host: 'proxy.example.com',
port: 1080,
};
expect(ProxyManager.shouldUseBunFetch(httpProxy)).toBe(true);
expect(ProxyManager.shouldUseBunFetch(socksProxy)).toBe(false);
});
test('should create proxy URL for Bun fetch', () => {
const proxy: ProxyInfo = {
protocol: 'http',
host: 'proxy.example.com',
port: 8080,
username: 'user',
password: 'pass',
};
const proxyUrl = ProxyManager.createProxyUrl(proxy);
expect(proxyUrl).toBe('http://user:pass@proxy.example.com:8080');
});
test('should create proxy URL without credentials', () => {
const proxy: ProxyInfo = {
protocol: 'https',
host: 'proxy.example.com',
port: 8080,
};
const proxyUrl = ProxyManager.createProxyUrl(proxy);
expect(proxyUrl).toBe('https://proxy.example.com:8080');
});
});

View file

@ -1,131 +1,132 @@
import { describe, test, expect, beforeAll, afterAll } from 'bun:test';
import { MockServer } from './mock-server';
/**
* Tests for the MockServer utility
* Ensures our test infrastructure works correctly
*/
describe('MockServer', () => {
let mockServer: MockServer;
let baseUrl: string;
beforeAll(async () => {
mockServer = new MockServer();
await mockServer.start();
baseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
await mockServer.stop();
});
describe('Server lifecycle', () => {
test('should start and provide base URL', () => {
expect(baseUrl).toMatch(/^http:\/\/localhost:\d+$/);
expect(mockServer.getBaseUrl()).toBe(baseUrl);
});
test('should be reachable', async () => {
const response = await fetch(`${baseUrl}/`);
expect(response.ok).toBe(true);
});
});
describe('Status endpoints', () => {
test('should return correct status codes', async () => {
const statusCodes = [200, 201, 400, 401, 403, 404, 500, 503];
for (const status of statusCodes) {
const response = await fetch(`${baseUrl}/status/${status}`);
expect(response.status).toBe(status);
}
});
});
describe('Headers endpoint', () => {
test('should echo request headers', async () => {
const response = await fetch(`${baseUrl}/headers`, {
headers: {
'X-Test-Header': 'test-value',
'User-Agent': 'MockServer-Test'
} });
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.headers).toHaveProperty('x-test-header', 'test-value');
expect(data.headers).toHaveProperty('user-agent', 'MockServer-Test');
});
});
describe('Basic auth endpoint', () => {
test('should authenticate valid credentials', async () => {
const username = 'testuser';
const password = 'testpass';
const credentials = btoa(`${username}:${password}`);
const response = await fetch(`${baseUrl}/basic-auth/${username}/${password}`, {
headers: {
'Authorization': `Basic ${credentials}`
}
});
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.authenticated).toBe(true);
expect(data.user).toBe(username);
});
test('should reject invalid credentials', async () => {
const credentials = btoa('wrong:credentials');
const response = await fetch(`${baseUrl}/basic-auth/user/pass`, {
headers: {
'Authorization': `Basic ${credentials}`
}
});
expect(response.status).toBe(401);
});
test('should reject missing auth header', async () => {
const response = await fetch(`${baseUrl}/basic-auth/user/pass`);
expect(response.status).toBe(401);
});
});
describe('POST endpoint', () => {
test('should echo POST data', async () => {
const testData = {
message: 'Hello, MockServer!',
timestamp: Date.now()
};
const response = await fetch(`${baseUrl}/post`, {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify(testData)
});
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.data).toEqual(testData);
expect(data.method).toBe('POST');
expect(data.headers).toHaveProperty('content-type', 'application/json');
});
});
describe('Default endpoint', () => {
test('should return request information', async () => {
const response = await fetch(`${baseUrl}/unknown-endpoint`);
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.url).toBe(`${baseUrl}/unknown-endpoint`);
expect(data.method).toBe('GET');
expect(data.headers).toBeDefined();
});
});
});
import { afterAll, beforeAll, describe, expect, test } from 'bun:test';
import { MockServer } from './mock-server';
/**
* Tests for the MockServer utility
* Ensures our test infrastructure works correctly
*/
describe('MockServer', () => {
let mockServer: MockServer;
let baseUrl: string;
beforeAll(async () => {
mockServer = new MockServer();
await mockServer.start();
baseUrl = mockServer.getBaseUrl();
});
afterAll(async () => {
await mockServer.stop();
});
describe('Server lifecycle', () => {
test('should start and provide base URL', () => {
expect(baseUrl).toMatch(/^http:\/\/localhost:\d+$/);
expect(mockServer.getBaseUrl()).toBe(baseUrl);
});
test('should be reachable', async () => {
const response = await fetch(`${baseUrl}/`);
expect(response.ok).toBe(true);
});
});
describe('Status endpoints', () => {
test('should return correct status codes', async () => {
const statusCodes = [200, 201, 400, 401, 403, 404, 500, 503];
for (const status of statusCodes) {
const response = await fetch(`${baseUrl}/status/${status}`);
expect(response.status).toBe(status);
}
});
});
describe('Headers endpoint', () => {
test('should echo request headers', async () => {
const response = await fetch(`${baseUrl}/headers`, {
headers: {
'X-Test-Header': 'test-value',
'User-Agent': 'MockServer-Test',
},
});
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.headers).toHaveProperty('x-test-header', 'test-value');
expect(data.headers).toHaveProperty('user-agent', 'MockServer-Test');
});
});
describe('Basic auth endpoint', () => {
test('should authenticate valid credentials', async () => {
const username = 'testuser';
const password = 'testpass';
const credentials = btoa(`${username}:${password}`);
const response = await fetch(`${baseUrl}/basic-auth/${username}/${password}`, {
headers: {
Authorization: `Basic ${credentials}`,
},
});
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.authenticated).toBe(true);
expect(data.user).toBe(username);
});
test('should reject invalid credentials', async () => {
const credentials = btoa('wrong:credentials');
const response = await fetch(`${baseUrl}/basic-auth/user/pass`, {
headers: {
Authorization: `Basic ${credentials}`,
},
});
expect(response.status).toBe(401);
});
test('should reject missing auth header', async () => {
const response = await fetch(`${baseUrl}/basic-auth/user/pass`);
expect(response.status).toBe(401);
});
});
describe('POST endpoint', () => {
test('should echo POST data', async () => {
const testData = {
message: 'Hello, MockServer!',
timestamp: Date.now(),
};
const response = await fetch(`${baseUrl}/post`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(testData),
});
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.data).toEqual(testData);
expect(data.method).toBe('POST');
expect(data.headers).toHaveProperty('content-type', 'application/json');
});
});
describe('Default endpoint', () => {
test('should return request information', async () => {
const response = await fetch(`${baseUrl}/unknown-endpoint`);
expect(response.ok).toBe(true);
const data = await response.json();
expect(data.url).toBe(`${baseUrl}/unknown-endpoint`);
expect(data.method).toBe('GET');
expect(data.headers).toBeDefined();
});
});
});

View file

@ -1,114 +1,116 @@
/**
* Mock HTTP server for testing the HTTP client
* Replaces external dependency on httpbin.org with a local server
*/
export class MockServer {
private server: ReturnType<typeof Bun.serve> | null = null;
private port: number = 0;
/**
* Start the mock server on a random port
*/
async start(): Promise<void> {
this.server = Bun.serve({
port: 1, // Use any available port
fetch: this.handleRequest.bind(this),
error: this.handleError.bind(this),
});
this.port = this.server.port || 1;
console.log(`Mock server started on port ${this.port}`);
}
/**
* Stop the mock server
*/
async stop(): Promise<void> {
if (this.server) {
this.server.stop(true);
this.server = null;
this.port = 0;
console.log('Mock server stopped');
}
}
/**
* Get the base URL of the mock server
*/
getBaseUrl(): string {
if (!this.server) {
throw new Error('Server not started');
}
return `http://localhost:${this.port}`;
}
/**
* Handle incoming requests
*/ private async handleRequest(req: Request): Promise<Response> {
const url = new URL(req.url);
const path = url.pathname;
console.log(`Mock server handling request: ${req.method} ${path}`);
// Status endpoints
if (path.startsWith('/status/')) {
const status = parseInt(path.replace('/status/', ''), 10);
console.log(`Returning status: ${status}`);
return new Response(null, { status });
} // Headers endpoint
if (path === '/headers') {
const headers = Object.fromEntries([...req.headers.entries()]);
console.log('Headers endpoint called, received headers:', headers);
return Response.json({ headers });
} // Basic auth endpoint
if (path.startsWith('/basic-auth/')) {
const parts = path.split('/').filter(Boolean);
const expectedUsername = parts[1];
const expectedPassword = parts[2];
console.log(`Basic auth endpoint called: expected user=${expectedUsername}, pass=${expectedPassword}`);
const authHeader = req.headers.get('authorization');
if (!authHeader || !authHeader.startsWith('Basic ')) {
console.log('Missing or invalid Authorization header');
return new Response('Unauthorized', { status: 401 });
}
const base64Credentials = authHeader.split(' ')[1];
const credentials = atob(base64Credentials);
const [username, password] = credentials.split(':');
if (username === expectedUsername && password === expectedPassword) {
return Response.json({
authenticated: true,
user: username
});
}
return new Response('Unauthorized', { status: 401 });
}
// Echo request body
if (path === '/post' && req.method === 'POST') {
const data = await req.json();
return Response.json({
data,
headers: Object.fromEntries([...req.headers.entries()]),
method: req.method
});
}
// Default response
return Response.json({
url: req.url,
method: req.method,
headers: Object.fromEntries([...req.headers.entries()])
});
}
/**
* Handle errors
*/
private handleError(error: Error): Response {
return new Response('Server error', { status: 500 });
}
}
/**
* Mock HTTP server for testing the HTTP client
* Replaces external dependency on httpbin.org with a local server
*/
export class MockServer {
private server: ReturnType<typeof Bun.serve> | null = null;
private port: number = 0;
/**
* Start the mock server on a random port
*/
async start(): Promise<void> {
this.server = Bun.serve({
port: 1, // Use any available port
fetch: this.handleRequest.bind(this),
error: this.handleError.bind(this),
});
this.port = this.server.port || 1;
console.log(`Mock server started on port ${this.port}`);
}
/**
* Stop the mock server
*/
async stop(): Promise<void> {
if (this.server) {
this.server.stop(true);
this.server = null;
this.port = 0;
console.log('Mock server stopped');
}
}
/**
* Get the base URL of the mock server
*/
getBaseUrl(): string {
if (!this.server) {
throw new Error('Server not started');
}
return `http://localhost:${this.port}`;
}
/**
* Handle incoming requests
*/ private async handleRequest(req: Request): Promise<Response> {
const url = new URL(req.url);
const path = url.pathname;
console.log(`Mock server handling request: ${req.method} ${path}`);
// Status endpoints
if (path.startsWith('/status/')) {
const status = parseInt(path.replace('/status/', ''), 10);
console.log(`Returning status: ${status}`);
return new Response(null, { status });
} // Headers endpoint
if (path === '/headers') {
const headers = Object.fromEntries([...req.headers.entries()]);
console.log('Headers endpoint called, received headers:', headers);
return Response.json({ headers });
} // Basic auth endpoint
if (path.startsWith('/basic-auth/')) {
const parts = path.split('/').filter(Boolean);
const expectedUsername = parts[1];
const expectedPassword = parts[2];
console.log(
`Basic auth endpoint called: expected user=${expectedUsername}, pass=${expectedPassword}`
);
const authHeader = req.headers.get('authorization');
if (!authHeader || !authHeader.startsWith('Basic ')) {
console.log('Missing or invalid Authorization header');
return new Response('Unauthorized', { status: 401 });
}
const base64Credentials = authHeader.split(' ')[1];
const credentials = atob(base64Credentials);
const [username, password] = credentials.split(':');
if (username === expectedUsername && password === expectedPassword) {
return Response.json({
authenticated: true,
user: username,
});
}
return new Response('Unauthorized', { status: 401 });
}
// Echo request body
if (path === '/post' && req.method === 'POST') {
const data = await req.json();
return Response.json({
data,
headers: Object.fromEntries([...req.headers.entries()]),
method: req.method,
});
}
// Default response
return Response.json({
url: req.url,
method: req.method,
headers: Object.fromEntries([...req.headers.entries()]),
});
}
/**
* Handle errors
*/
private handleError(error: Error): Response {
return new Response('Server error', { status: 500 });
}
}

View file

@ -1,18 +1,14 @@
/**
* @stock-bot/logger - Simplified logging library
*
* Main exports for the logger library
*/
// Core logger classes and functions
export {
Logger,
getLogger,
shutdownLoggers
} from './logger';
// Type definitions
export type { LogLevel, LogContext, LogMetadata } from './types';
// Default export
export { getLogger as default } from './logger';
/**
* @stock-bot/logger - Simplified logging library
*
* Main exports for the logger library
*/
// Core logger classes and functions
export { Logger, getLogger, shutdownLoggers } from './logger';
// Type definitions
export type { LogLevel, LogContext, LogMetadata } from './types';
// Default export
export { getLogger as default } from './logger';

View file

@ -1,271 +1,270 @@
/**
* Simplified Pino-based logger for Stock Bot platform
*
* Features:
* - High performance JSON logging with Pino
* - Console, file, and Loki transports
* - Structured logging with metadata
* - Service-specific context
*/
import pino from 'pino';
import { loggingConfig, lokiConfig } from '@stock-bot/config';
import type { LogLevel, LogContext, LogMetadata } from './types';
// Simple cache for logger instances
const loggerCache = new Map<string, pino.Logger>();
console.log('Logger cache initialized: ', loggingConfig.LOG_LEVEL);
/**
* Create transport configuration
*/
function createTransports(serviceName: string): any {
const targets: any[] = [];
// const isDev = loggingConfig.LOG_ENVIRONMENT === 'development';
// Console transport
if (loggingConfig.LOG_CONSOLE) {
targets.push({
target: 'pino-pretty',
level: loggingConfig.LOG_LEVEL, // Only show errors on console
options: {
colorize: true,
translateTime: 'yyyy-mm-dd HH:MM:ss.l',
messageFormat: '[{service}{childName}] {msg}',
singleLine: true,
hideObject: false,
ignore: 'pid,hostname,service,environment,version,childName',
errorLikeObjectKeys: ['err', 'error'],
errorProps: 'message,stack,name,code',
}
});
}
// File transport
if (loggingConfig.LOG_FILE) {
targets.push({
target: 'pino/file',
level: loggingConfig.LOG_LEVEL,
options: {
destination: `${loggingConfig.LOG_FILE_PATH}/${serviceName}.log`,
mkdir: true
}
});
}
// Loki transport
if (lokiConfig.LOKI_HOST) {
targets.push({
target: 'pino-loki',
level: loggingConfig.LOG_LEVEL,
options: {
host: lokiConfig.LOKI_URL || `http://${lokiConfig.LOKI_HOST}:${lokiConfig.LOKI_PORT}`,
labels: {
service: serviceName,
environment: lokiConfig.LOKI_ENVIRONMENT_LABEL
},
ignore: 'childName',
}
});
}
return targets.length > 0 ? { targets } : null;
}
/**
* Get or create pino logger
*/
function getPinoLogger(serviceName: string): pino.Logger {
if (!loggerCache.has(serviceName)) {
const transport = createTransports(serviceName);
const config: pino.LoggerOptions = {
level: loggingConfig.LOG_LEVEL,
base: {
service: serviceName,
environment: loggingConfig.LOG_ENVIRONMENT,
version: loggingConfig.LOG_SERVICE_VERSION
}
};
if (transport) {
config.transport = transport;
}
loggerCache.set(serviceName, pino(config));
}
return loggerCache.get(serviceName)!;
}
/**
* Simplified Logger class
*/
export class Logger {
private pino: pino.Logger;
private context: LogContext;
private serviceName: string;
private childName?: string;
constructor(serviceName: string, context: LogContext = {}) {
this.pino = getPinoLogger(serviceName);
this.context = context;
this.serviceName = serviceName;
}
/**
* Core log method
*/
private log(level: LogLevel, message: string | object, metadata?: LogMetadata): void {
const data = { ...this.context, ...metadata };
if (typeof message === 'string') {
(this.pino as any)[level](data, message);
} else {
(this.pino as any)[level]({ ...data, data: message }, 'Object logged');
}
}
// Simple log level methods
debug(message: string | object, metadata?: LogMetadata): void {
this.log('debug', message, metadata);
}
info(message: string | object, metadata?: LogMetadata): void {
this.log('info', message, metadata);
}
warn(message: string | object, metadata?: LogMetadata): void {
this.log('warn', message, metadata);
}
error(message: string | object, metadata?: LogMetadata & { error?: any } | unknown): void {
let data: any = {};
// Handle metadata parameter normalization
if (metadata instanceof Error) {
// Direct Error object as metadata
data = { error: metadata };
} else if (metadata !== null && typeof metadata === 'object') {
// Object metadata (including arrays, but not null)
data = { ...metadata };
} else if (metadata !== undefined) {
// Primitive values (string, number, boolean, etc.)
data = { metadata };
}
// Handle multiple error properties in metadata
const errorKeys = ['error', 'err', 'primaryError', 'secondaryError'];
errorKeys.forEach(key => {
if (data[key]) {
const normalizedKey = key === 'error' ? 'err' : `${key}_normalized`;
data[normalizedKey] = this.normalizeError(data[key]);
// Only delete the original 'error' key to maintain other error properties
if (key === 'error') {
delete data.error;
}
}
});
this.log('error', message, data);
}
/**
* Normalize any error type to a structured format
*/
private normalizeError(error: any): any {
if (error instanceof Error) {
return {
name: error.name,
message: error.message,
stack: error.stack,
};
}
if (error && typeof error === 'object') {
// Handle error-like objects
return {
name: error.name || 'UnknownError',
message: error.message || error.toString(),
...(error.stack && { stack: error.stack }),
...(error.code && { code: error.code }),
...(error.status && { status: error.status })
};
}
// Handle primitives (string, number, etc.)
return {
name: 'UnknownError',
message: String(error)
};
}
/**
* Create child logger with additional context
*/
child(serviceName: string, context?: LogContext): Logger {
// Create child logger that shares the same pino instance with additional context
const childLogger = Object.create(Logger.prototype);
childLogger.serviceName = this.serviceName;
childLogger.childName = serviceName;
childLogger.context = { ...this.context, ...context };
const childBindings = {
service: this.serviceName,
childName: ' -> ' + serviceName,
...(context || childLogger.context)
};
childLogger.pino = this.pino.child(childBindings);
return childLogger;
// }
// childLogger.pino = this.pino.child(context || childLogger.context); // Let pino handle level inheritance naturally
// return childLogger;
}
// Getters for service and context
getServiceName(): string {
return this.serviceName;
}
getChildName(): string | undefined {
return this.childName;
}
}
/**
* Main factory function
*/
export function getLogger(serviceName: string, context?: LogContext): Logger {
return new Logger(serviceName, context);
}
/**
* Gracefully shutdown all logger instances
* This should be called during application shutdown to ensure all logs are flushed
*/
export async function shutdownLoggers(): Promise<void> {
const flushPromises = Array.from(loggerCache.values()).map(logger => {
return new Promise<void>((resolve) => {
if (typeof logger.flush === 'function') {
logger.flush((err) => {
if (err) {
console.error('Logger flush error:', err);
}
resolve();
});
} else {
resolve();
}
});
});
try {
await Promise.allSettled(flushPromises);
console.log('All loggers flushed successfully');
} catch (error) {
console.error('Logger flush failed:', error);
} finally {
loggerCache.clear();
}
}
// Export types for convenience
export type { LogLevel, LogContext, LogMetadata } from './types';
/**
* Simplified Pino-based logger for Stock Bot platform
*
* Features:
* - High performance JSON logging with Pino
* - Console, file, and Loki transports
* - Structured logging with metadata
* - Service-specific context
*/
import pino from 'pino';
import { loggingConfig, lokiConfig } from '@stock-bot/config';
import type { LogContext, LogLevel, LogMetadata } from './types';
// Simple cache for logger instances
const loggerCache = new Map<string, pino.Logger>();
console.log('Logger cache initialized: ', loggingConfig.LOG_LEVEL);
/**
* Create transport configuration
*/
function createTransports(serviceName: string): any {
const targets: any[] = [];
// const isDev = loggingConfig.LOG_ENVIRONMENT === 'development';
// Console transport
if (loggingConfig.LOG_CONSOLE) {
targets.push({
target: 'pino-pretty',
level: loggingConfig.LOG_LEVEL, // Only show errors on console
options: {
colorize: true,
translateTime: 'yyyy-mm-dd HH:MM:ss.l',
messageFormat: '[{service}{childName}] {msg}',
singleLine: true,
hideObject: false,
ignore: 'pid,hostname,service,environment,version,childName',
errorLikeObjectKeys: ['err', 'error'],
errorProps: 'message,stack,name,code',
},
});
}
// File transport
if (loggingConfig.LOG_FILE) {
targets.push({
target: 'pino/file',
level: loggingConfig.LOG_LEVEL,
options: {
destination: `${loggingConfig.LOG_FILE_PATH}/${serviceName}.log`,
mkdir: true,
},
});
}
// Loki transport
if (lokiConfig.LOKI_HOST) {
targets.push({
target: 'pino-loki',
level: loggingConfig.LOG_LEVEL,
options: {
host: lokiConfig.LOKI_URL || `http://${lokiConfig.LOKI_HOST}:${lokiConfig.LOKI_PORT}`,
labels: {
service: serviceName,
environment: lokiConfig.LOKI_ENVIRONMENT_LABEL,
},
ignore: 'childName',
},
});
}
return targets.length > 0 ? { targets } : null;
}
/**
* Get or create pino logger
*/
function getPinoLogger(serviceName: string): pino.Logger {
if (!loggerCache.has(serviceName)) {
const transport = createTransports(serviceName);
const config: pino.LoggerOptions = {
level: loggingConfig.LOG_LEVEL,
base: {
service: serviceName,
environment: loggingConfig.LOG_ENVIRONMENT,
version: loggingConfig.LOG_SERVICE_VERSION,
},
};
if (transport) {
config.transport = transport;
}
loggerCache.set(serviceName, pino(config));
}
return loggerCache.get(serviceName)!;
}
/**
* Simplified Logger class
*/
export class Logger {
private pino: pino.Logger;
private context: LogContext;
private serviceName: string;
private childName?: string;
constructor(serviceName: string, context: LogContext = {}) {
this.pino = getPinoLogger(serviceName);
this.context = context;
this.serviceName = serviceName;
}
/**
* Core log method
*/
private log(level: LogLevel, message: string | object, metadata?: LogMetadata): void {
const data = { ...this.context, ...metadata };
if (typeof message === 'string') {
(this.pino as any)[level](data, message);
} else {
(this.pino as any)[level]({ ...data, data: message }, 'Object logged');
}
}
// Simple log level methods
debug(message: string | object, metadata?: LogMetadata): void {
this.log('debug', message, metadata);
}
info(message: string | object, metadata?: LogMetadata): void {
this.log('info', message, metadata);
}
warn(message: string | object, metadata?: LogMetadata): void {
this.log('warn', message, metadata);
}
error(message: string | object, metadata?: (LogMetadata & { error?: any }) | unknown): void {
let data: any = {};
// Handle metadata parameter normalization
if (metadata instanceof Error) {
// Direct Error object as metadata
data = { error: metadata };
} else if (metadata !== null && typeof metadata === 'object') {
// Object metadata (including arrays, but not null)
data = { ...metadata };
} else if (metadata !== undefined) {
// Primitive values (string, number, boolean, etc.)
data = { metadata };
}
// Handle multiple error properties in metadata
const errorKeys = ['error', 'err', 'primaryError', 'secondaryError'];
errorKeys.forEach(key => {
if (data[key]) {
const normalizedKey = key === 'error' ? 'err' : `${key}_normalized`;
data[normalizedKey] = this.normalizeError(data[key]);
// Only delete the original 'error' key to maintain other error properties
if (key === 'error') {
delete data.error;
}
}
});
this.log('error', message, data);
}
/**
* Normalize any error type to a structured format
*/
private normalizeError(error: any): any {
if (error instanceof Error) {
return {
name: error.name,
message: error.message,
stack: error.stack,
};
}
if (error && typeof error === 'object') {
// Handle error-like objects
return {
name: error.name || 'UnknownError',
message: error.message || error.toString(),
...(error.stack && { stack: error.stack }),
...(error.code && { code: error.code }),
...(error.status && { status: error.status }),
};
}
// Handle primitives (string, number, etc.)
return {
name: 'UnknownError',
message: String(error),
};
}
/**
* Create child logger with additional context
*/
child(serviceName: string, context?: LogContext): Logger {
// Create child logger that shares the same pino instance with additional context
const childLogger = Object.create(Logger.prototype);
childLogger.serviceName = this.serviceName;
childLogger.childName = serviceName;
childLogger.context = { ...this.context, ...context };
const childBindings = {
service: this.serviceName,
childName: ' -> ' + serviceName,
...(context || childLogger.context),
};
childLogger.pino = this.pino.child(childBindings);
return childLogger;
// }
// childLogger.pino = this.pino.child(context || childLogger.context); // Let pino handle level inheritance naturally
// return childLogger;
}
// Getters for service and context
getServiceName(): string {
return this.serviceName;
}
getChildName(): string | undefined {
return this.childName;
}
}
/**
* Main factory function
*/
export function getLogger(serviceName: string, context?: LogContext): Logger {
return new Logger(serviceName, context);
}
/**
* Gracefully shutdown all logger instances
* This should be called during application shutdown to ensure all logs are flushed
*/
export async function shutdownLoggers(): Promise<void> {
const flushPromises = Array.from(loggerCache.values()).map(logger => {
return new Promise<void>(resolve => {
if (typeof logger.flush === 'function') {
logger.flush(err => {
if (err) {
console.error('Logger flush error:', err);
}
resolve();
});
} else {
resolve();
}
});
});
try {
await Promise.allSettled(flushPromises);
console.log('All loggers flushed successfully');
} catch (error) {
console.error('Logger flush failed:', error);
} finally {
loggerCache.clear();
}
}
// Export types for convenience
export type { LogLevel, LogContext, LogMetadata } from './types';

View file

@ -1,16 +1,16 @@
/**
* Simplified type definitions for the logger library
*/
// Standard log levels (simplified to pino defaults)
export type LogLevel = 'debug' | 'info' | 'warn' | 'error';
// Context that persists across log calls
export interface LogContext {
[key: string]: any;
}
// Metadata for individual log entries
export interface LogMetadata {
[key: string]: any;
}
/**
* Simplified type definitions for the logger library
*/
// Standard log levels (simplified to pino defaults)
export type LogLevel = 'debug' | 'info' | 'warn' | 'error';
// Context that persists across log calls
export interface LogContext {
[key: string]: any;
}
// Metadata for individual log entries
export interface LogMetadata {
[key: string]: any;
}

View file

@ -1,200 +1,201 @@
/**
* Advanced Logger Tests
*
* Tests for advanced logger functionality including complex metadata handling,
* child loggers, and advanced error scenarios.
*/
import { describe, it, expect, beforeEach, afterEach } from 'bun:test';
import { Logger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Advanced Logger Features', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('advanced-features');
logger = testLoggerInstance.logger;
}); afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Complex Metadata Handling', () => {
it('should handle nested metadata objects', () => {
const complexMetadata = {
user: { id: '123', name: 'John Doe' },
session: { id: 'sess-456', timeout: 3600 },
request: { method: 'POST', path: '/api/test' }
};
logger.info('Complex operation', complexMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].user).toEqual({ id: '123', name: 'John Doe' });
expect(logs[0].session).toEqual({ id: 'sess-456', timeout: 3600 });
expect(logs[0].request).toEqual({ method: 'POST', path: '/api/test' });
});
it('should handle arrays in metadata', () => {
const arrayMetadata = {
tags: ['user', 'authentication', 'success'],
ids: [1, 2, 3, 4]
};
logger.info('Array metadata test', arrayMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].tags).toEqual(['user', 'authentication', 'success']);
expect(logs[0].ids).toEqual([1, 2, 3, 4]);
});
it('should handle null and undefined metadata values', () => {
const nullMetadata = {
nullValue: null,
undefinedValue: undefined,
emptyString: '',
zeroValue: 0
};
logger.info('Null metadata test', nullMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].nullValue).toBe(null);
expect(logs[0].emptyString).toBe('');
expect(logs[0].zeroValue).toBe(0);
});
});
describe('Child Logger Functionality', () => {
it('should create child logger with additional context', () => {
const childLogger = logger.child({
component: 'auth-service',
version: '1.2.3'
});
childLogger.info('Child logger message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].component).toBe('auth-service');
expect(logs[0].version).toBe('1.2.3');
expect(logs[0].msg).toBe('Child logger message');
});
it('should support nested child loggers', () => {
const childLogger = logger.child({ level1: 'parent' });
const grandChildLogger = childLogger.child({ level2: 'child' });
grandChildLogger.warn('Nested child message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level1).toBe('parent');
expect(logs[0].level2).toBe('child');
expect(logs[0].level).toBe('warn');
});
it('should merge child context with log metadata', () => {
const childLogger = logger.child({ service: 'api' });
childLogger.info('Request processed', {
requestId: 'req-789',
duration: 150
});
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].service).toBe('api');
expect(logs[0].requestId).toBe('req-789');
expect(logs[0].duration).toBe(150);
});
});
describe('Advanced Error Handling', () => {
it('should handle Error objects with custom properties', () => {
const customError = new Error('Custom error message');
(customError as any).code = 'ERR_CUSTOM';
(customError as any).statusCode = 500;
logger.error('Custom error occurred', { error: customError });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Custom error occurred');
});
it('should handle multiple errors in metadata', () => {
const error1 = new Error('First error');
const error2 = new Error('Second error');
logger.error('Multiple errors', {
primaryError: error1,
secondaryError: error2,
context: 'batch processing'
});
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].context).toBe('batch processing');
});
it('should handle error objects with circular references', () => {
const errorWithCircular: any = { name: 'CircularError', message: 'Circular reference error' };
// Create a simple circular reference
errorWithCircular.self = errorWithCircular;
// Should not throw when logging circular references
expect(() => {
logger.error('Circular error test', { error: errorWithCircular });
}).not.toThrow();
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
// Clean up circular reference to prevent memory issues
delete errorWithCircular.self;
});
});
describe('Performance and Edge Cases', () => {
it('should handle moderate metadata objects', () => {
const moderateMetadata: any = {};
for (let i = 0; i < 10; i++) {
moderateMetadata[`key${i}`] = `value${i}`;
}
logger.debug('Moderate metadata test', moderateMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].key0).toBe('value0');
expect(logs[0].key9).toBe('value9');
});
it('should handle special characters in messages', () => {
const specialMessage = 'Special chars: 🚀 ñ ü';
logger.info(specialMessage);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe(specialMessage);
});
it('should handle empty and whitespace-only messages', () => {
logger.info('');
logger.info(' ');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(2);
expect(logs[0].msg).toBe('');
expect(logs[1].msg).toBe(' ');
});
});
});
/**
* Advanced Logger Tests
*
* Tests for advanced logger functionality including complex metadata handling,
* child loggers, and advanced error scenarios.
*/
import { afterEach, beforeEach, describe, expect, it } from 'bun:test';
import { Logger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Advanced Logger Features', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('advanced-features');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Complex Metadata Handling', () => {
it('should handle nested metadata objects', () => {
const complexMetadata = {
user: { id: '123', name: 'John Doe' },
session: { id: 'sess-456', timeout: 3600 },
request: { method: 'POST', path: '/api/test' },
};
logger.info('Complex operation', complexMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].user).toEqual({ id: '123', name: 'John Doe' });
expect(logs[0].session).toEqual({ id: 'sess-456', timeout: 3600 });
expect(logs[0].request).toEqual({ method: 'POST', path: '/api/test' });
});
it('should handle arrays in metadata', () => {
const arrayMetadata = {
tags: ['user', 'authentication', 'success'],
ids: [1, 2, 3, 4],
};
logger.info('Array metadata test', arrayMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].tags).toEqual(['user', 'authentication', 'success']);
expect(logs[0].ids).toEqual([1, 2, 3, 4]);
});
it('should handle null and undefined metadata values', () => {
const nullMetadata = {
nullValue: null,
undefinedValue: undefined,
emptyString: '',
zeroValue: 0,
};
logger.info('Null metadata test', nullMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].nullValue).toBe(null);
expect(logs[0].emptyString).toBe('');
expect(logs[0].zeroValue).toBe(0);
});
});
describe('Child Logger Functionality', () => {
it('should create child logger with additional context', () => {
const childLogger = logger.child({
component: 'auth-service',
version: '1.2.3',
});
childLogger.info('Child logger message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].component).toBe('auth-service');
expect(logs[0].version).toBe('1.2.3');
expect(logs[0].msg).toBe('Child logger message');
});
it('should support nested child loggers', () => {
const childLogger = logger.child({ level1: 'parent' });
const grandChildLogger = childLogger.child({ level2: 'child' });
grandChildLogger.warn('Nested child message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level1).toBe('parent');
expect(logs[0].level2).toBe('child');
expect(logs[0].level).toBe('warn');
});
it('should merge child context with log metadata', () => {
const childLogger = logger.child({ service: 'api' });
childLogger.info('Request processed', {
requestId: 'req-789',
duration: 150,
});
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].service).toBe('api');
expect(logs[0].requestId).toBe('req-789');
expect(logs[0].duration).toBe(150);
});
});
describe('Advanced Error Handling', () => {
it('should handle Error objects with custom properties', () => {
const customError = new Error('Custom error message');
(customError as any).code = 'ERR_CUSTOM';
(customError as any).statusCode = 500;
logger.error('Custom error occurred', { error: customError });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Custom error occurred');
});
it('should handle multiple errors in metadata', () => {
const error1 = new Error('First error');
const error2 = new Error('Second error');
logger.error('Multiple errors', {
primaryError: error1,
secondaryError: error2,
context: 'batch processing',
});
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].context).toBe('batch processing');
});
it('should handle error objects with circular references', () => {
const errorWithCircular: any = { name: 'CircularError', message: 'Circular reference error' };
// Create a simple circular reference
errorWithCircular.self = errorWithCircular;
// Should not throw when logging circular references
expect(() => {
logger.error('Circular error test', { error: errorWithCircular });
}).not.toThrow();
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
// Clean up circular reference to prevent memory issues
delete errorWithCircular.self;
});
});
describe('Performance and Edge Cases', () => {
it('should handle moderate metadata objects', () => {
const moderateMetadata: any = {};
for (let i = 0; i < 10; i++) {
moderateMetadata[`key${i}`] = `value${i}`;
}
logger.debug('Moderate metadata test', moderateMetadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].key0).toBe('value0');
expect(logs[0].key9).toBe('value9');
});
it('should handle special characters in messages', () => {
const specialMessage = 'Special chars: 🚀 ñ ü';
logger.info(specialMessage);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe(specialMessage);
});
it('should handle empty and whitespace-only messages', () => {
logger.info('');
logger.info(' ');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(2);
expect(logs[0].msg).toBe('');
expect(logs[1].msg).toBe(' ');
});
});
});

View file

@ -1,169 +1,169 @@
/**
* Basic Logger Tests
*
* Tests for the core logger functionality and utilities.
*/
import { describe, it, expect, beforeEach, afterEach } from 'bun:test';
import { Logger, getLogger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Basic Logger Tests', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('utils-test');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Logger Factory Functions', () => {
it('should create logger with getLogger', () => {
expect(typeof getLogger).toBe('function');
// Test that getLogger doesn't throw
expect(() => {
const anotherTestLoggerInstance = loggerTestHelpers.createTestLogger('factory-test');
anotherTestLoggerInstance.logger.info('Factory test');
}).not.toThrow();
});
});
describe('Logger Methods', () => {
it('should have all required logging methods', () => {
expect(typeof logger.debug).toBe('function');
expect(typeof logger.info).toBe('function');
expect(typeof logger.warn).toBe('function');
expect(typeof logger.error).toBe('function');
expect(typeof logger.child).toBe('function');
});
it('should log with different message types', () => {
// String message
logger.info('String message');
// Object message
logger.info({ event: 'object_message', data: 'test' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(2);
expect(logs[0].msg).toBe('String message');
expect(logs[1].level).toBe('info');
});
it('should handle metadata correctly', () => {
const metadata = {
userId: 'user123',
sessionId: 'session456',
requestId: 'req789'
};
logger.info('Request processed', metadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].userId).toBe('user123');
expect(logs[0].sessionId).toBe('session456');
expect(logs[0].requestId).toBe('req789');
});
});
describe('Child Logger Functionality', () => {
it('should create child loggers with additional context', () => {
const childLogger = logger.child({
module: 'payment',
version: '1.0.0'
});
childLogger.info('Payment processed');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe('Payment processed');
});
it('should inherit service name in child loggers', () => {
const childLogger = logger.child({ operation: 'test' });
childLogger.info('Child operation');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].service).toBe('utils-test');
});
});
describe('Error Normalization', () => {
it('should handle Error objects', () => {
const error = new Error('Test error');
error.stack = 'Error stack trace';
logger.error('Error test', error);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
it('should handle error-like objects', () => {
const errorLike = {
name: 'ValidationError',
message: 'Invalid input',
code: 'VALIDATION_FAILED'
};
logger.error('Validation failed', { error: errorLike });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
it('should handle primitive error values', () => {
logger.error('Simple error', { error: 'Error string' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
});
describe('Service Context', () => {
it('should include service name in all logs', () => {
logger.debug('Debug message');
logger.info('Info message');
logger.warn('Warn message');
logger.error('Error message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(4);
logs.forEach(log => {
expect(log.service).toBe('utils-test');
});
});
it('should support different service names', () => {
const logger1Instance = loggerTestHelpers.createTestLogger('service-one');
const logger2Instance = loggerTestHelpers.createTestLogger('service-two');
logger1Instance.logger.info('Message from service one');
logger2Instance.logger.info('Message from service two');
// Since each logger instance has its own capture, we check them separately
// or combine them if that's the desired test logic.
// For this test, it seems we want to ensure they are separate.
const logs1 = logger1Instance.getCapturedLogs();
expect(logs1.length).toBe(1);
expect(logs1[0].service).toBe('service-one');
const logs2 = logger2Instance.getCapturedLogs();
expect(logs2.length).toBe(1);
expect(logs2[0].service).toBe('service-two');
});
});
});
/**
* Basic Logger Tests
*
* Tests for the core logger functionality and utilities.
*/
import { afterEach, beforeEach, describe, expect, it } from 'bun:test';
import { getLogger, Logger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Basic Logger Tests', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('utils-test');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Logger Factory Functions', () => {
it('should create logger with getLogger', () => {
expect(typeof getLogger).toBe('function');
// Test that getLogger doesn't throw
expect(() => {
const anotherTestLoggerInstance = loggerTestHelpers.createTestLogger('factory-test');
anotherTestLoggerInstance.logger.info('Factory test');
}).not.toThrow();
});
});
describe('Logger Methods', () => {
it('should have all required logging methods', () => {
expect(typeof logger.debug).toBe('function');
expect(typeof logger.info).toBe('function');
expect(typeof logger.warn).toBe('function');
expect(typeof logger.error).toBe('function');
expect(typeof logger.child).toBe('function');
});
it('should log with different message types', () => {
// String message
logger.info('String message');
// Object message
logger.info({ event: 'object_message', data: 'test' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(2);
expect(logs[0].msg).toBe('String message');
expect(logs[1].level).toBe('info');
});
it('should handle metadata correctly', () => {
const metadata = {
userId: 'user123',
sessionId: 'session456',
requestId: 'req789',
};
logger.info('Request processed', metadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].userId).toBe('user123');
expect(logs[0].sessionId).toBe('session456');
expect(logs[0].requestId).toBe('req789');
});
});
describe('Child Logger Functionality', () => {
it('should create child loggers with additional context', () => {
const childLogger = logger.child({
module: 'payment',
version: '1.0.0',
});
childLogger.info('Payment processed');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe('Payment processed');
});
it('should inherit service name in child loggers', () => {
const childLogger = logger.child({ operation: 'test' });
childLogger.info('Child operation');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].service).toBe('utils-test');
});
});
describe('Error Normalization', () => {
it('should handle Error objects', () => {
const error = new Error('Test error');
error.stack = 'Error stack trace';
logger.error('Error test', error);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
it('should handle error-like objects', () => {
const errorLike = {
name: 'ValidationError',
message: 'Invalid input',
code: 'VALIDATION_FAILED',
};
logger.error('Validation failed', { error: errorLike });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
it('should handle primitive error values', () => {
logger.error('Simple error', { error: 'Error string' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
});
});
describe('Service Context', () => {
it('should include service name in all logs', () => {
logger.debug('Debug message');
logger.info('Info message');
logger.warn('Warn message');
logger.error('Error message');
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(4);
logs.forEach(log => {
expect(log.service).toBe('utils-test');
});
});
it('should support different service names', () => {
const logger1Instance = loggerTestHelpers.createTestLogger('service-one');
const logger2Instance = loggerTestHelpers.createTestLogger('service-two');
logger1Instance.logger.info('Message from service one');
logger2Instance.logger.info('Message from service two');
// Since each logger instance has its own capture, we check them separately
// or combine them if that's the desired test logic.
// For this test, it seems we want to ensure they are separate.
const logs1 = logger1Instance.getCapturedLogs();
expect(logs1.length).toBe(1);
expect(logs1[0].service).toBe('service-one');
const logs2 = logger2Instance.getCapturedLogs();
expect(logs2.length).toBe(1);
expect(logs2[0].service).toBe('service-two');
});
});
});

View file

@ -1,192 +1,188 @@
/**
* Logger Integration Tests
*
* Tests the core functionality of the simplified @stock-bot/logger package.
*/
import { describe, it, expect, beforeEach, afterEach } from 'bun:test';
import {
Logger,
getLogger,
shutdownLoggers
} from '../src';
import { loggerTestHelpers } from './setup';
describe('Logger Integration Tests', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('integration-test');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Core Logger Functionality', () => {
it('should log messages at different levels', () => {
// Test multiple log levels
logger.debug('Debug message');
logger.info('Info message');
logger.warn('Warning message');
logger.error('Error message');
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify logs were captured
expect(logs.length).toBe(4);
expect(logs[0].level).toBe('debug');
expect(logs[0].msg).toBe('Debug message');
expect(logs[1].level).toBe('info');
expect(logs[1].msg).toBe('Info message');
expect(logs[2].level).toBe('warn');
expect(logs[2].msg).toBe('Warning message');
expect(logs[3].level).toBe('error');
expect(logs[3].msg).toBe('Error message');
});
it('should log objects as structured logs', () => {
// Log an object
logger.info('User logged in', { userId: '123', action: 'login' });
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify structured log
expect(logs.length).toBe(1);
expect(logs[0].userId).toBe('123');
expect(logs[0].action).toBe('login');
expect(logs[0].msg).toBe('User logged in');
});
it('should handle error objects in error logs', () => {
const testError = new Error('Test error message');
// Log error with error object
logger.error('Something went wrong', { error: testError });
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify error was logged
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Something went wrong');
});
it('should create child loggers with additional context', () => {
// Create a child logger with additional context
const childLogger = logger.child({
transactionId: 'tx-789',
operation: 'payment'
});
// Log with child logger
childLogger.info('Child logger test');
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify child logger logged something
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe('Child logger test');
});
});
describe('Factory Functions', () => {
it('should export factory functions', () => {
// Verify that the factory functions are exported and callable
expect(typeof getLogger).toBe('function');
});
it('should create different logger instances', () => {
const logger1Instance = loggerTestHelpers.createTestLogger('service-1');
const logger2Instance = loggerTestHelpers.createTestLogger('service-2');
logger1Instance.logger.info('Message from service 1');
logger2Instance.logger.info('Message from service 2');
const logs1 = logger1Instance.getCapturedLogs();
expect(logs1.length).toBe(1);
expect(logs1[0].service).toBe('service-1');
const logs2 = logger2Instance.getCapturedLogs();
expect(logs2.length).toBe(1);
expect(logs2[0].service).toBe('service-2');
});
});
describe('Error Handling', () => {
it('should normalize Error objects', () => {
const error = new Error('Test error');
error.stack = 'Error stack trace';
logger.error('Error occurred', error);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Error occurred');
});
it('should handle error-like objects', () => {
const errorLike = {
name: 'CustomError',
message: 'Custom error message',
code: 'ERR_CUSTOM'
};
logger.error('Custom error occurred', { error: errorLike });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Custom error occurred');
});
it('should handle primitive error values', () => {
logger.error('String error occurred', { error: 'Simple string error' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('String error occurred');
});
});
describe('Metadata Handling', () => {
it('should include metadata in logs', () => {
const metadata = {
requestId: 'req-123',
userId: 'user-456',
operation: 'data-fetch'
};
logger.info('Operation completed', metadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].requestId).toBe('req-123');
expect(logs[0].userId).toBe('user-456');
expect(logs[0].operation).toBe('data-fetch');
});
it('should handle object messages', () => {
const objectMessage = {
event: 'user_action',
action: 'login',
timestamp: Date.now()
};
logger.info(objectMessage);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('info');
});
});
});
/**
* Logger Integration Tests
*
* Tests the core functionality of the simplified @stock-bot/logger package.
*/
import { afterEach, beforeEach, describe, expect, it } from 'bun:test';
import { getLogger, Logger, shutdownLoggers } from '../src';
import { loggerTestHelpers } from './setup';
describe('Logger Integration Tests', () => {
let logger: Logger;
let testLoggerInstance: ReturnType<typeof loggerTestHelpers.createTestLogger>;
beforeEach(() => {
testLoggerInstance = loggerTestHelpers.createTestLogger('integration-test');
logger = testLoggerInstance.logger;
});
afterEach(async () => {
testLoggerInstance.clearCapturedLogs();
// Clear any global logger cache
await shutdownLoggers();
});
describe('Core Logger Functionality', () => {
it('should log messages at different levels', () => {
// Test multiple log levels
logger.debug('Debug message');
logger.info('Info message');
logger.warn('Warning message');
logger.error('Error message');
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify logs were captured
expect(logs.length).toBe(4);
expect(logs[0].level).toBe('debug');
expect(logs[0].msg).toBe('Debug message');
expect(logs[1].level).toBe('info');
expect(logs[1].msg).toBe('Info message');
expect(logs[2].level).toBe('warn');
expect(logs[2].msg).toBe('Warning message');
expect(logs[3].level).toBe('error');
expect(logs[3].msg).toBe('Error message');
});
it('should log objects as structured logs', () => {
// Log an object
logger.info('User logged in', { userId: '123', action: 'login' });
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify structured log
expect(logs.length).toBe(1);
expect(logs[0].userId).toBe('123');
expect(logs[0].action).toBe('login');
expect(logs[0].msg).toBe('User logged in');
});
it('should handle error objects in error logs', () => {
const testError = new Error('Test error message');
// Log error with error object
logger.error('Something went wrong', { error: testError });
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify error was logged
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Something went wrong');
});
it('should create child loggers with additional context', () => {
// Create a child logger with additional context
const childLogger = logger.child({
transactionId: 'tx-789',
operation: 'payment',
});
// Log with child logger
childLogger.info('Child logger test');
// Get captured logs
const logs = testLoggerInstance.getCapturedLogs();
// Verify child logger logged something
expect(logs.length).toBe(1);
expect(logs[0].msg).toBe('Child logger test');
});
});
describe('Factory Functions', () => {
it('should export factory functions', () => {
// Verify that the factory functions are exported and callable
expect(typeof getLogger).toBe('function');
});
it('should create different logger instances', () => {
const logger1Instance = loggerTestHelpers.createTestLogger('service-1');
const logger2Instance = loggerTestHelpers.createTestLogger('service-2');
logger1Instance.logger.info('Message from service 1');
logger2Instance.logger.info('Message from service 2');
const logs1 = logger1Instance.getCapturedLogs();
expect(logs1.length).toBe(1);
expect(logs1[0].service).toBe('service-1');
const logs2 = logger2Instance.getCapturedLogs();
expect(logs2.length).toBe(1);
expect(logs2[0].service).toBe('service-2');
});
});
describe('Error Handling', () => {
it('should normalize Error objects', () => {
const error = new Error('Test error');
error.stack = 'Error stack trace';
logger.error('Error occurred', error);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Error occurred');
});
it('should handle error-like objects', () => {
const errorLike = {
name: 'CustomError',
message: 'Custom error message',
code: 'ERR_CUSTOM',
};
logger.error('Custom error occurred', { error: errorLike });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('Custom error occurred');
});
it('should handle primitive error values', () => {
logger.error('String error occurred', { error: 'Simple string error' });
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('error');
expect(logs[0].msg).toBe('String error occurred');
});
});
describe('Metadata Handling', () => {
it('should include metadata in logs', () => {
const metadata = {
requestId: 'req-123',
userId: 'user-456',
operation: 'data-fetch',
};
logger.info('Operation completed', metadata);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].requestId).toBe('req-123');
expect(logs[0].userId).toBe('user-456');
expect(logs[0].operation).toBe('data-fetch');
});
it('should handle object messages', () => {
const objectMessage = {
event: 'user_action',
action: 'login',
timestamp: Date.now(),
};
logger.info(objectMessage);
const logs = testLoggerInstance.getCapturedLogs();
expect(logs.length).toBe(1);
expect(logs[0].level).toBe('info');
});
});
});

View file

@ -1,137 +1,150 @@
/**
* Logger Test Setup
*
* Setup file specific to Logger library tests.
* Provides utilities and mocks for testing logging operations.
*/
import { Logger, LogMetadata, shutdownLoggers } from '../src';
import { afterAll, afterEach, beforeAll, beforeEach } from 'bun:test';
// Store original console methods
const originalConsole = {
log: console.log,
info: console.info,
warn: console.warn,
error: console.error,
debug: console.debug
};
// Create a test logger helper
export const loggerTestHelpers = {
/**
* Mock Loki transport
*/
mockLokiTransport: () => ({
on: () => {},
write: () => {}
}),
/**
* Create a mock Hono context for middleware tests
*/ createHonoContextMock: (options: any = {}) => {
// Default path and method
const path = options.path || '/test';
const method = options.method || 'GET';
// Create request headers
const headerEntries = Object.entries(options.req?.headers || {});
const headerMap = new Map(headerEntries);
const rawHeaders = new Headers();
headerEntries.forEach(([key, value]) => rawHeaders.set(key, value as string));
// Create request with standard properties needed for middleware
const req = {
method,
url: `http://localhost${path}`,
path,
raw: {
url: `http://localhost${path}`,
method,
headers: rawHeaders
},
query: {},
param: () => undefined,
header: (name: string) => rawHeaders.get(name.toLowerCase()),
headers: headerMap,
...options.req
};
// Create mock response
const res = {
status: 200,
statusText: 'OK',
body: null,
headers: new Map(),
clone: function() { return { ...this, text: async () => JSON.stringify(this.body) }; },
text: async () => JSON.stringify(res.body),
...options.res
};
// Create context with all required Hono methods
const c: any = {
req,
env: {},
res,
header: (name: string, value: string) => {
c.res.headers.set(name.toLowerCase(), value);
return c;
},
get: (key: string) => c[key],
set: (key: string, value: any) => { c[key] = value; return c; },
status: (code: number) => { c.res.status = code; return c; },
json: (body: any) => { c.res.body = body; return c; },
executionCtx: { waitUntil: (fn: Function) => { fn(); } }
};
return c;
},
/**
* Create a mock Next function for middleware tests
*/
createNextMock: () => {
return async () => {
// Do nothing, simulate middleware completion
return;
};
}
};
// Setup environment before tests
beforeAll(() => {
// Don't let real logs through during tests
console.log = () => {};
console.info = () => {};
console.warn = () => {};
console.error = () => {};
console.debug = () => {};
// Override NODE_ENV for tests
process.env.NODE_ENV = 'test';
// Disable real logging during tests
process.env.LOG_LEVEL = 'silent';
process.env.LOG_CONSOLE = 'false';
process.env.LOG_FILE = 'false';
// Mock Loki config to prevent real connections
process.env.LOKI_HOST = '';
process.env.LOKI_URL = '';
});
// Clean up after each test
afterEach(async () => {
// Clear logger cache to prevent state pollution between tests
await shutdownLoggers();
});
// Restore everything after tests
afterAll(() => {
console.log = originalConsole.log;
console.info = originalConsole.info;
console.warn = originalConsole.warn;
console.error = originalConsole.error;
console.debug = originalConsole.debug;
});
/**
* Logger Test Setup
*
* Setup file specific to Logger library tests.
* Provides utilities and mocks for testing logging operations.
*/
import { afterAll, afterEach, beforeAll, beforeEach } from 'bun:test';
import { Logger, LogMetadata, shutdownLoggers } from '../src';
// Store original console methods
const originalConsole = {
log: console.log,
info: console.info,
warn: console.warn,
error: console.error,
debug: console.debug,
};
// Create a test logger helper
export const loggerTestHelpers = {
/**
* Mock Loki transport
*/
mockLokiTransport: () => ({
on: () => {},
write: () => {},
}),
/**
* Create a mock Hono context for middleware tests
*/ createHonoContextMock: (options: any = {}) => {
// Default path and method
const path = options.path || '/test';
const method = options.method || 'GET';
// Create request headers
const headerEntries = Object.entries(options.req?.headers || {});
const headerMap = new Map(headerEntries);
const rawHeaders = new Headers();
headerEntries.forEach(([key, value]) => rawHeaders.set(key, value as string));
// Create request with standard properties needed for middleware
const req = {
method,
url: `http://localhost${path}`,
path,
raw: {
url: `http://localhost${path}`,
method,
headers: rawHeaders,
},
query: {},
param: () => undefined,
header: (name: string) => rawHeaders.get(name.toLowerCase()),
headers: headerMap,
...options.req,
};
// Create mock response
const res = {
status: 200,
statusText: 'OK',
body: null,
headers: new Map(),
clone: function () {
return { ...this, text: async () => JSON.stringify(this.body) };
},
text: async () => JSON.stringify(res.body),
...options.res,
};
// Create context with all required Hono methods
const c: any = {
req,
env: {},
res,
header: (name: string, value: string) => {
c.res.headers.set(name.toLowerCase(), value);
return c;
},
get: (key: string) => c[key],
set: (key: string, value: any) => {
c[key] = value;
return c;
},
status: (code: number) => {
c.res.status = code;
return c;
},
json: (body: any) => {
c.res.body = body;
return c;
},
executionCtx: {
waitUntil: (fn: Function) => {
fn();
},
},
};
return c;
},
/**
* Create a mock Next function for middleware tests
*/
createNextMock: () => {
return async () => {
// Do nothing, simulate middleware completion
return;
};
},
};
// Setup environment before tests
beforeAll(() => {
// Don't let real logs through during tests
console.log = () => {};
console.info = () => {};
console.warn = () => {};
console.error = () => {};
console.debug = () => {};
// Override NODE_ENV for tests
process.env.NODE_ENV = 'test';
// Disable real logging during tests
process.env.LOG_LEVEL = 'silent';
process.env.LOG_CONSOLE = 'false';
process.env.LOG_FILE = 'false';
// Mock Loki config to prevent real connections
process.env.LOKI_HOST = '';
process.env.LOKI_URL = '';
});
// Clean up after each test
afterEach(async () => {
// Clear logger cache to prevent state pollution between tests
await shutdownLoggers();
});
// Restore everything after tests
afterAll(() => {
console.log = originalConsole.log;
console.info = originalConsole.info;
console.warn = originalConsole.warn;
console.error = originalConsole.error;
console.debug = originalConsole.debug;
});

View file

@ -1,247 +1,247 @@
import type { Document } from 'mongodb';
import type { MongoDBClient } from './client';
import type { CollectionNames } from './types';
/**
* MongoDB Aggregation Builder
*
* Provides a fluent interface for building MongoDB aggregation pipelines
*/
export class MongoDBAggregationBuilder {
private pipeline: any[] = [];
private readonly client: MongoDBClient;
private collection: CollectionNames | null = null;
constructor(client: MongoDBClient) {
this.client = client;
}
/**
* Set the collection to aggregate on
*/
from(collection: CollectionNames): this {
this.collection = collection;
return this;
}
/**
* Add a match stage
*/
match(filter: any): this {
this.pipeline.push({ $match: filter });
return this;
}
/**
* Add a group stage
*/
group(groupBy: any): this {
this.pipeline.push({ $group: groupBy });
return this;
}
/**
* Add a sort stage
*/
sort(sortBy: any): this {
this.pipeline.push({ $sort: sortBy });
return this;
}
/**
* Add a limit stage
*/
limit(count: number): this {
this.pipeline.push({ $limit: count });
return this;
}
/**
* Add a skip stage
*/
skip(count: number): this {
this.pipeline.push({ $skip: count });
return this;
}
/**
* Add a project stage
*/
project(projection: any): this {
this.pipeline.push({ $project: projection });
return this;
}
/**
* Add an unwind stage
*/
unwind(field: string, options?: any): this {
this.pipeline.push({
$unwind: options ? { path: field, ...options } : field
});
return this;
}
/**
* Add a lookup stage (join)
*/
lookup(from: string, localField: string, foreignField: string, as: string): this {
this.pipeline.push({
$lookup: {
from,
localField,
foreignField,
as
}
});
return this;
}
/**
* Add a custom stage
*/
addStage(stage: any): this {
this.pipeline.push(stage);
return this;
}
/**
* Execute the aggregation pipeline
*/
async execute<T extends Document = Document>(): Promise<T[]> {
if (!this.collection) {
throw new Error('Collection not specified. Use .from() to set the collection.');
}
const collection = this.client.getCollection(this.collection);
return await collection.aggregate<T>(this.pipeline).toArray();
}
/**
* Get the pipeline array
*/
getPipeline(): any[] {
return [...this.pipeline];
}
/**
* Reset the pipeline
*/
reset(): this {
this.pipeline = [];
this.collection = null;
return this;
}
// Convenience methods for common aggregations
/**
* Sentiment analysis aggregation
*/
sentimentAnalysis(symbol?: string, timeframe?: { start: Date; end: Date }): this {
this.from('sentiment_data');
const matchConditions: any = {};
if (symbol) matchConditions.symbol = symbol;
if (timeframe) {
matchConditions.timestamp = {
$gte: timeframe.start,
$lte: timeframe.end
};
}
if (Object.keys(matchConditions).length > 0) {
this.match(matchConditions);
}
return this.group({
_id: {
symbol: '$symbol',
sentiment: '$sentiment_label'
},
count: { $sum: 1 },
avgScore: { $avg: '$sentiment_score' },
avgConfidence: { $avg: '$confidence' }
});
}
/**
* News article aggregation by publication
*/
newsByPublication(symbols?: string[]): this {
this.from('news_articles');
if (symbols && symbols.length > 0) {
this.match({ symbols: { $in: symbols } });
}
return this.group({
_id: '$publication',
articleCount: { $sum: 1 },
symbols: { $addToSet: '$symbols' },
avgSentiment: { $avg: '$sentiment_score' },
latestArticle: { $max: '$published_date' }
});
}
/**
* SEC filings by company
*/
secFilingsByCompany(filingTypes?: string[]): this {
this.from('sec_filings');
if (filingTypes && filingTypes.length > 0) {
this.match({ filing_type: { $in: filingTypes } });
}
return this.group({
_id: {
cik: '$cik',
company: '$company_name'
},
filingCount: { $sum: 1 },
filingTypes: { $addToSet: '$filing_type' },
latestFiling: { $max: '$filing_date' },
symbols: { $addToSet: '$symbols' }
});
}
/**
* Document processing status summary
*/
processingStatusSummary(collection: CollectionNames): this {
this.from(collection);
return this.group({
_id: '$processing_status',
count: { $sum: 1 },
avgSizeBytes: { $avg: '$size_bytes' },
oldestDocument: { $min: '$created_at' },
newestDocument: { $max: '$created_at' }
});
}
/**
* Time-based aggregation (daily/hourly counts)
*/
timeBasedCounts(
collection: CollectionNames,
dateField: string = 'created_at',
interval: 'hour' | 'day' | 'week' | 'month' = 'day'
): this {
this.from(collection);
const dateFormat = {
hour: { $dateToString: { format: '%Y-%m-%d %H:00:00', date: `$${dateField}` } },
day: { $dateToString: { format: '%Y-%m-%d', date: `$${dateField}` } },
week: { $dateToString: { format: '%Y-W%V', date: `$${dateField}` } },
month: { $dateToString: { format: '%Y-%m', date: `$${dateField}` } }
};
return this.group({
_id: dateFormat[interval],
count: { $sum: 1 },
firstDocument: { $min: `$${dateField}` },
lastDocument: { $max: `$${dateField}` }
}).sort({ _id: 1 });
}
}
import type { Document } from 'mongodb';
import type { MongoDBClient } from './client';
import type { CollectionNames } from './types';
/**
* MongoDB Aggregation Builder
*
* Provides a fluent interface for building MongoDB aggregation pipelines
*/
export class MongoDBAggregationBuilder {
private pipeline: any[] = [];
private readonly client: MongoDBClient;
private collection: CollectionNames | null = null;
constructor(client: MongoDBClient) {
this.client = client;
}
/**
* Set the collection to aggregate on
*/
from(collection: CollectionNames): this {
this.collection = collection;
return this;
}
/**
* Add a match stage
*/
match(filter: any): this {
this.pipeline.push({ $match: filter });
return this;
}
/**
* Add a group stage
*/
group(groupBy: any): this {
this.pipeline.push({ $group: groupBy });
return this;
}
/**
* Add a sort stage
*/
sort(sortBy: any): this {
this.pipeline.push({ $sort: sortBy });
return this;
}
/**
* Add a limit stage
*/
limit(count: number): this {
this.pipeline.push({ $limit: count });
return this;
}
/**
* Add a skip stage
*/
skip(count: number): this {
this.pipeline.push({ $skip: count });
return this;
}
/**
* Add a project stage
*/
project(projection: any): this {
this.pipeline.push({ $project: projection });
return this;
}
/**
* Add an unwind stage
*/
unwind(field: string, options?: any): this {
this.pipeline.push({
$unwind: options ? { path: field, ...options } : field,
});
return this;
}
/**
* Add a lookup stage (join)
*/
lookup(from: string, localField: string, foreignField: string, as: string): this {
this.pipeline.push({
$lookup: {
from,
localField,
foreignField,
as,
},
});
return this;
}
/**
* Add a custom stage
*/
addStage(stage: any): this {
this.pipeline.push(stage);
return this;
}
/**
* Execute the aggregation pipeline
*/
async execute<T extends Document = Document>(): Promise<T[]> {
if (!this.collection) {
throw new Error('Collection not specified. Use .from() to set the collection.');
}
const collection = this.client.getCollection(this.collection);
return await collection.aggregate<T>(this.pipeline).toArray();
}
/**
* Get the pipeline array
*/
getPipeline(): any[] {
return [...this.pipeline];
}
/**
* Reset the pipeline
*/
reset(): this {
this.pipeline = [];
this.collection = null;
return this;
}
// Convenience methods for common aggregations
/**
* Sentiment analysis aggregation
*/
sentimentAnalysis(symbol?: string, timeframe?: { start: Date; end: Date }): this {
this.from('sentiment_data');
const matchConditions: any = {};
if (symbol) matchConditions.symbol = symbol;
if (timeframe) {
matchConditions.timestamp = {
$gte: timeframe.start,
$lte: timeframe.end,
};
}
if (Object.keys(matchConditions).length > 0) {
this.match(matchConditions);
}
return this.group({
_id: {
symbol: '$symbol',
sentiment: '$sentiment_label',
},
count: { $sum: 1 },
avgScore: { $avg: '$sentiment_score' },
avgConfidence: { $avg: '$confidence' },
});
}
/**
* News article aggregation by publication
*/
newsByPublication(symbols?: string[]): this {
this.from('news_articles');
if (symbols && symbols.length > 0) {
this.match({ symbols: { $in: symbols } });
}
return this.group({
_id: '$publication',
articleCount: { $sum: 1 },
symbols: { $addToSet: '$symbols' },
avgSentiment: { $avg: '$sentiment_score' },
latestArticle: { $max: '$published_date' },
});
}
/**
* SEC filings by company
*/
secFilingsByCompany(filingTypes?: string[]): this {
this.from('sec_filings');
if (filingTypes && filingTypes.length > 0) {
this.match({ filing_type: { $in: filingTypes } });
}
return this.group({
_id: {
cik: '$cik',
company: '$company_name',
},
filingCount: { $sum: 1 },
filingTypes: { $addToSet: '$filing_type' },
latestFiling: { $max: '$filing_date' },
symbols: { $addToSet: '$symbols' },
});
}
/**
* Document processing status summary
*/
processingStatusSummary(collection: CollectionNames): this {
this.from(collection);
return this.group({
_id: '$processing_status',
count: { $sum: 1 },
avgSizeBytes: { $avg: '$size_bytes' },
oldestDocument: { $min: '$created_at' },
newestDocument: { $max: '$created_at' },
});
}
/**
* Time-based aggregation (daily/hourly counts)
*/
timeBasedCounts(
collection: CollectionNames,
dateField: string = 'created_at',
interval: 'hour' | 'day' | 'week' | 'month' = 'day'
): this {
this.from(collection);
const dateFormat = {
hour: { $dateToString: { format: '%Y-%m-%d %H:00:00', date: `$${dateField}` } },
day: { $dateToString: { format: '%Y-%m-%d', date: `$${dateField}` } },
week: { $dateToString: { format: '%Y-W%V', date: `$${dateField}` } },
month: { $dateToString: { format: '%Y-%m', date: `$${dateField}` } },
};
return this.group({
_id: dateFormat[interval],
count: { $sum: 1 },
firstDocument: { $min: `$${dateField}` },
lastDocument: { $max: `$${dateField}` },
}).sort({ _id: 1 });
}
}

View file

@ -1,379 +1,396 @@
import { MongoClient, Db, Collection, MongoClientOptions, Document, WithId, OptionalUnlessRequiredId } from 'mongodb';
import { mongodbConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import type {
MongoDBClientConfig,
MongoDBConnectionOptions,
CollectionNames,
DocumentBase,
SentimentData,
RawDocument,
NewsArticle,
SecFiling,
EarningsTranscript,
AnalystReport
} from './types';
import { MongoDBHealthMonitor } from './health';
import { schemaMap } from './schemas';
import * as yup from 'yup';
/**
* MongoDB Client for Stock Bot
*
* Provides type-safe access to MongoDB collections with built-in
* health monitoring, connection pooling, and schema validation.
*/
export class MongoDBClient {
private client: MongoClient | null = null;
private db: Db | null = null;
private readonly config: MongoDBClientConfig;
private readonly options: MongoDBConnectionOptions;
private readonly logger: ReturnType<typeof getLogger>;
private readonly healthMonitor: MongoDBHealthMonitor;
private isConnected = false;
constructor(
config?: Partial<MongoDBClientConfig>,
options?: MongoDBConnectionOptions
) {
this.config = this.buildConfig(config);
this.options = {
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options
};
this.logger = getLogger('mongodb-client');
this.healthMonitor = new MongoDBHealthMonitor(this);
}
/**
* Connect to MongoDB
*/
async connect(): Promise<void> {
if (this.isConnected && this.client) {
return;
}
const uri = this.buildConnectionUri();
const clientOptions = this.buildClientOptions();
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(`Connecting to MongoDB (attempt ${attempt}/${this.options.retryAttempts})...`);
this.client = new MongoClient(uri, clientOptions);
await this.client.connect();
// Test the connection
await this.client.db(this.config.database).admin().ping();
this.db = this.client.db(this.config.database);
this.isConnected = true;
this.logger.info('Successfully connected to MongoDB');
// Start health monitoring
this.healthMonitor.start();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`MongoDB connection attempt ${attempt} failed:`, error);
if (this.client) {
await this.client.close();
this.client = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(`Failed to connect to MongoDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`);
}
/**
* Disconnect from MongoDB
*/
async disconnect(): Promise<void> {
if (!this.client) {
return;
}
try {
this.healthMonitor.stop();
await this.client.close();
this.isConnected = false;
this.client = null;
this.db = null;
this.logger.info('Disconnected from MongoDB');
} catch (error) {
this.logger.error('Error disconnecting from MongoDB:', error);
throw error;
}
}
/**
* Get a typed collection
*/
getCollection<T extends DocumentBase>(name: CollectionNames): Collection<T> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
return this.db.collection<T>(name);
}
/**
* Insert a document with validation
*/
async insertOne<T extends DocumentBase>(
collectionName: CollectionNames,
document: Omit<T, '_id' | 'created_at' | 'updated_at'> & Partial<Pick<T, 'created_at' | 'updated_at'>>
): Promise<T> {
const collection = this.getCollection<T>(collectionName);
// Add timestamps
const now = new Date();
const docWithTimestamps = {
...document,
created_at: document.created_at || now,
updated_at: now
} as T; // Validate document if schema exists
if (collectionName in schemaMap) {
try {
(schemaMap as any)[collectionName].validateSync(docWithTimestamps);
} catch (error) {
if (error instanceof yup.ValidationError) {
this.logger.error(`Document validation failed for ${collectionName}:`, error.errors);
throw new Error(`Document validation failed: ${error.errors?.map(e => e).join(', ')}`);
}
throw error;
}
}const result = await collection.insertOne(docWithTimestamps as OptionalUnlessRequiredId<T>);
return { ...docWithTimestamps, _id: result.insertedId } as T;
}
/**
* Update a document with validation
*/
async updateOne<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any,
update: Partial<T>
): Promise<boolean> {
const collection = this.getCollection<T>(collectionName);
// Add updated timestamp
const updateWithTimestamp = {
...update,
updated_at: new Date()
};
const result = await collection.updateOne(filter, { $set: updateWithTimestamp });
return result.modifiedCount > 0;
}
/**
* Find documents with optional validation
*/
async find<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any = {},
options: any = {}
): Promise<T[]> {
const collection = this.getCollection<T>(collectionName);
return await collection.find(filter, options).toArray() as T[];
}
/**
* Find one document
*/
async findOne<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any
): Promise<T | null> {
const collection = this.getCollection<T>(collectionName);
return await collection.findOne(filter) as T | null;
}
/**
* Aggregate with type safety
*/
async aggregate<T extends DocumentBase>(
collectionName: CollectionNames,
pipeline: any[]
): Promise<T[]> {
const collection = this.getCollection<T>(collectionName);
return await collection.aggregate<T>(pipeline).toArray();
}
/**
* Count documents
*/
async countDocuments(
collectionName: CollectionNames,
filter: any = {}
): Promise<number> {
const collection = this.getCollection(collectionName);
return await collection.countDocuments(filter);
}
/**
* Create indexes for better performance
*/
async createIndexes(): Promise<void> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
try {
// Sentiment data indexes
await this.db.collection('sentiment_data').createIndexes([
{ key: { symbol: 1, timestamp: -1 } },
{ key: { sentiment_label: 1 } },
{ key: { source_type: 1 } },
{ key: { created_at: -1 } }
]);
// News articles indexes
await this.db.collection('news_articles').createIndexes([
{ key: { symbols: 1, published_date: -1 } },
{ key: { publication: 1 } },
{ key: { categories: 1 } },
{ key: { created_at: -1 } }
]);
// SEC filings indexes
await this.db.collection('sec_filings').createIndexes([
{ key: { symbols: 1, filing_date: -1 } },
{ key: { filing_type: 1 } },
{ key: { cik: 1 } },
{ key: { created_at: -1 } }
]); // Raw documents indexes
await this.db.collection('raw_documents').createIndex(
{ content_hash: 1 },
{ unique: true }
);
await this.db.collection('raw_documents').createIndexes([
{ key: { processing_status: 1 } },
{ key: { document_type: 1 } },
{ key: { created_at: -1 } }
]);
this.logger.info('MongoDB indexes created successfully');
} catch (error) {
this.logger.error('Error creating MongoDB indexes:', error);
throw error;
}
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
return await this.db.stats();
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.client;
}
/**
* Get the underlying MongoDB client
*/
get mongoClient(): MongoClient | null {
return this.client;
}
/**
* Get the database instance
*/
get database(): Db | null {
return this.db;
}
private buildConfig(config?: Partial<MongoDBClientConfig>): MongoDBClientConfig {
return {
host: config?.host || mongodbConfig.MONGODB_HOST,
port: config?.port || mongodbConfig.MONGODB_PORT,
database: config?.database || mongodbConfig.MONGODB_DATABASE,
username: config?.username || mongodbConfig.MONGODB_USERNAME,
password: config?.password || mongodbConfig.MONGODB_PASSWORD,
authSource: config?.authSource || mongodbConfig.MONGODB_AUTH_SOURCE,
uri: config?.uri || mongodbConfig.MONGODB_URI,
poolSettings: {
maxPoolSize: mongodbConfig.MONGODB_MAX_POOL_SIZE,
minPoolSize: mongodbConfig.MONGODB_MIN_POOL_SIZE,
maxIdleTime: mongodbConfig.MONGODB_MAX_IDLE_TIME,
...config?.poolSettings
},
timeouts: {
connectTimeout: mongodbConfig.MONGODB_CONNECT_TIMEOUT,
socketTimeout: mongodbConfig.MONGODB_SOCKET_TIMEOUT,
serverSelectionTimeout: mongodbConfig.MONGODB_SERVER_SELECTION_TIMEOUT,
...config?.timeouts
},
tls: {
enabled: mongodbConfig.MONGODB_TLS,
insecure: mongodbConfig.MONGODB_TLS_INSECURE,
caFile: mongodbConfig.MONGODB_TLS_CA_FILE,
...config?.tls
},
options: {
retryWrites: mongodbConfig.MONGODB_RETRY_WRITES,
journal: mongodbConfig.MONGODB_JOURNAL,
readPreference: mongodbConfig.MONGODB_READ_PREFERENCE as any,
writeConcern: mongodbConfig.MONGODB_WRITE_CONCERN,
...config?.options
}
};
}
private buildConnectionUri(): string {
if (this.config.uri) {
return this.config.uri;
}
const { host, port, username, password, database, authSource } = this.config;
const auth = username && password ? `${username}:${password}@` : '';
const authDb = authSource ? `?authSource=${authSource}` : '';
return `mongodb://${auth}${host}:${port}/${database}${authDb}`;
}
private buildClientOptions(): MongoClientOptions {
return {
maxPoolSize: this.config.poolSettings?.maxPoolSize,
minPoolSize: this.config.poolSettings?.minPoolSize,
maxIdleTimeMS: this.config.poolSettings?.maxIdleTime,
connectTimeoutMS: this.config.timeouts?.connectTimeout,
socketTimeoutMS: this.config.timeouts?.socketTimeout,
serverSelectionTimeoutMS: this.config.timeouts?.serverSelectionTimeout,
retryWrites: this.config.options?.retryWrites,
journal: this.config.options?.journal,
readPreference: this.config.options?.readPreference, writeConcern: this.config.options?.writeConcern ? {
w: this.config.options.writeConcern === 'majority'
? 'majority' as const
: parseInt(this.config.options.writeConcern, 10) || 1
} : undefined,
tls: this.config.tls?.enabled,
tlsInsecure: this.config.tls?.insecure,
tlsCAFile: this.config.tls?.caFile
};
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}
import {
Collection,
Db,
Document,
MongoClient,
MongoClientOptions,
OptionalUnlessRequiredId,
WithId,
} from 'mongodb';
import * as yup from 'yup';
import { mongodbConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import { MongoDBHealthMonitor } from './health';
import { schemaMap } from './schemas';
import type {
AnalystReport,
CollectionNames,
DocumentBase,
EarningsTranscript,
MongoDBClientConfig,
MongoDBConnectionOptions,
NewsArticle,
RawDocument,
SecFiling,
SentimentData,
} from './types';
/**
* MongoDB Client for Stock Bot
*
* Provides type-safe access to MongoDB collections with built-in
* health monitoring, connection pooling, and schema validation.
*/
export class MongoDBClient {
private client: MongoClient | null = null;
private db: Db | null = null;
private readonly config: MongoDBClientConfig;
private readonly options: MongoDBConnectionOptions;
private readonly logger: ReturnType<typeof getLogger>;
private readonly healthMonitor: MongoDBHealthMonitor;
private isConnected = false;
constructor(config?: Partial<MongoDBClientConfig>, options?: MongoDBConnectionOptions) {
this.config = this.buildConfig(config);
this.options = {
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options,
};
this.logger = getLogger('mongodb-client');
this.healthMonitor = new MongoDBHealthMonitor(this);
}
/**
* Connect to MongoDB
*/
async connect(): Promise<void> {
if (this.isConnected && this.client) {
return;
}
const uri = this.buildConnectionUri();
const clientOptions = this.buildClientOptions();
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(
`Connecting to MongoDB (attempt ${attempt}/${this.options.retryAttempts})...`
);
this.client = new MongoClient(uri, clientOptions);
await this.client.connect();
// Test the connection
await this.client.db(this.config.database).admin().ping();
this.db = this.client.db(this.config.database);
this.isConnected = true;
this.logger.info('Successfully connected to MongoDB');
// Start health monitoring
this.healthMonitor.start();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`MongoDB connection attempt ${attempt} failed:`, error);
if (this.client) {
await this.client.close();
this.client = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(
`Failed to connect to MongoDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`
);
}
/**
* Disconnect from MongoDB
*/
async disconnect(): Promise<void> {
if (!this.client) {
return;
}
try {
this.healthMonitor.stop();
await this.client.close();
this.isConnected = false;
this.client = null;
this.db = null;
this.logger.info('Disconnected from MongoDB');
} catch (error) {
this.logger.error('Error disconnecting from MongoDB:', error);
throw error;
}
}
/**
* Get a typed collection
*/
getCollection<T extends DocumentBase>(name: CollectionNames): Collection<T> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
return this.db.collection<T>(name);
}
/**
* Insert a document with validation
*/
async insertOne<T extends DocumentBase>(
collectionName: CollectionNames,
document: Omit<T, '_id' | 'created_at' | 'updated_at'> &
Partial<Pick<T, 'created_at' | 'updated_at'>>
): Promise<T> {
const collection = this.getCollection<T>(collectionName);
// Add timestamps
const now = new Date();
const docWithTimestamps = {
...document,
created_at: document.created_at || now,
updated_at: now,
} as T; // Validate document if schema exists
if (collectionName in schemaMap) {
try {
(schemaMap as any)[collectionName].validateSync(docWithTimestamps);
} catch (error) {
if (error instanceof yup.ValidationError) {
this.logger.error(`Document validation failed for ${collectionName}:`, error.errors);
throw new Error(`Document validation failed: ${error.errors?.map(e => e).join(', ')}`);
}
throw error;
}
}
const result = await collection.insertOne(docWithTimestamps as OptionalUnlessRequiredId<T>);
return { ...docWithTimestamps, _id: result.insertedId } as T;
}
/**
* Update a document with validation
*/
async updateOne<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any,
update: Partial<T>
): Promise<boolean> {
const collection = this.getCollection<T>(collectionName);
// Add updated timestamp
const updateWithTimestamp = {
...update,
updated_at: new Date(),
};
const result = await collection.updateOne(filter, { $set: updateWithTimestamp });
return result.modifiedCount > 0;
}
/**
* Find documents with optional validation
*/
async find<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any = {},
options: any = {}
): Promise<T[]> {
const collection = this.getCollection<T>(collectionName);
return (await collection.find(filter, options).toArray()) as T[];
}
/**
* Find one document
*/
async findOne<T extends DocumentBase>(
collectionName: CollectionNames,
filter: any
): Promise<T | null> {
const collection = this.getCollection<T>(collectionName);
return (await collection.findOne(filter)) as T | null;
}
/**
* Aggregate with type safety
*/
async aggregate<T extends DocumentBase>(
collectionName: CollectionNames,
pipeline: any[]
): Promise<T[]> {
const collection = this.getCollection<T>(collectionName);
return await collection.aggregate<T>(pipeline).toArray();
}
/**
* Count documents
*/
async countDocuments(collectionName: CollectionNames, filter: any = {}): Promise<number> {
const collection = this.getCollection(collectionName);
return await collection.countDocuments(filter);
}
/**
* Create indexes for better performance
*/
async createIndexes(): Promise<void> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
try {
// Sentiment data indexes
await this.db
.collection('sentiment_data')
.createIndexes([
{ key: { symbol: 1, timestamp: -1 } },
{ key: { sentiment_label: 1 } },
{ key: { source_type: 1 } },
{ key: { created_at: -1 } },
]);
// News articles indexes
await this.db
.collection('news_articles')
.createIndexes([
{ key: { symbols: 1, published_date: -1 } },
{ key: { publication: 1 } },
{ key: { categories: 1 } },
{ key: { created_at: -1 } },
]);
// SEC filings indexes
await this.db
.collection('sec_filings')
.createIndexes([
{ key: { symbols: 1, filing_date: -1 } },
{ key: { filing_type: 1 } },
{ key: { cik: 1 } },
{ key: { created_at: -1 } },
]); // Raw documents indexes
await this.db.collection('raw_documents').createIndex({ content_hash: 1 }, { unique: true });
await this.db
.collection('raw_documents')
.createIndexes([
{ key: { processing_status: 1 } },
{ key: { document_type: 1 } },
{ key: { created_at: -1 } },
]);
this.logger.info('MongoDB indexes created successfully');
} catch (error) {
this.logger.error('Error creating MongoDB indexes:', error);
throw error;
}
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
if (!this.db) {
throw new Error('MongoDB client not connected');
}
return await this.db.stats();
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.client;
}
/**
* Get the underlying MongoDB client
*/
get mongoClient(): MongoClient | null {
return this.client;
}
/**
* Get the database instance
*/
get database(): Db | null {
return this.db;
}
private buildConfig(config?: Partial<MongoDBClientConfig>): MongoDBClientConfig {
return {
host: config?.host || mongodbConfig.MONGODB_HOST,
port: config?.port || mongodbConfig.MONGODB_PORT,
database: config?.database || mongodbConfig.MONGODB_DATABASE,
username: config?.username || mongodbConfig.MONGODB_USERNAME,
password: config?.password || mongodbConfig.MONGODB_PASSWORD,
authSource: config?.authSource || mongodbConfig.MONGODB_AUTH_SOURCE,
uri: config?.uri || mongodbConfig.MONGODB_URI,
poolSettings: {
maxPoolSize: mongodbConfig.MONGODB_MAX_POOL_SIZE,
minPoolSize: mongodbConfig.MONGODB_MIN_POOL_SIZE,
maxIdleTime: mongodbConfig.MONGODB_MAX_IDLE_TIME,
...config?.poolSettings,
},
timeouts: {
connectTimeout: mongodbConfig.MONGODB_CONNECT_TIMEOUT,
socketTimeout: mongodbConfig.MONGODB_SOCKET_TIMEOUT,
serverSelectionTimeout: mongodbConfig.MONGODB_SERVER_SELECTION_TIMEOUT,
...config?.timeouts,
},
tls: {
enabled: mongodbConfig.MONGODB_TLS,
insecure: mongodbConfig.MONGODB_TLS_INSECURE,
caFile: mongodbConfig.MONGODB_TLS_CA_FILE,
...config?.tls,
},
options: {
retryWrites: mongodbConfig.MONGODB_RETRY_WRITES,
journal: mongodbConfig.MONGODB_JOURNAL,
readPreference: mongodbConfig.MONGODB_READ_PREFERENCE as any,
writeConcern: mongodbConfig.MONGODB_WRITE_CONCERN,
...config?.options,
},
};
}
private buildConnectionUri(): string {
if (this.config.uri) {
return this.config.uri;
}
const { host, port, username, password, database, authSource } = this.config;
const auth = username && password ? `${username}:${password}@` : '';
const authDb = authSource ? `?authSource=${authSource}` : '';
return `mongodb://${auth}${host}:${port}/${database}${authDb}`;
}
private buildClientOptions(): MongoClientOptions {
return {
maxPoolSize: this.config.poolSettings?.maxPoolSize,
minPoolSize: this.config.poolSettings?.minPoolSize,
maxIdleTimeMS: this.config.poolSettings?.maxIdleTime,
connectTimeoutMS: this.config.timeouts?.connectTimeout,
socketTimeoutMS: this.config.timeouts?.socketTimeout,
serverSelectionTimeoutMS: this.config.timeouts?.serverSelectionTimeout,
retryWrites: this.config.options?.retryWrites,
journal: this.config.options?.journal,
readPreference: this.config.options?.readPreference,
writeConcern: this.config.options?.writeConcern
? {
w:
this.config.options.writeConcern === 'majority'
? ('majority' as const)
: parseInt(this.config.options.writeConcern, 10) || 1,
}
: undefined,
tls: this.config.tls?.enabled,
tlsInsecure: this.config.tls?.insecure,
tlsCAFile: this.config.tls?.caFile,
};
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}

View file

@ -1,66 +1,66 @@
import { MongoDBClient } from './client';
import { mongodbConfig } from '@stock-bot/config';
import type { MongoDBClientConfig, MongoDBConnectionOptions } from './types';
/**
* Factory function to create a MongoDB client instance
*/
export function createMongoDBClient(
config?: Partial<MongoDBClientConfig>,
options?: MongoDBConnectionOptions
): MongoDBClient {
return new MongoDBClient(config, options);
}
/**
* Create a MongoDB client with default configuration
*/
export function createDefaultMongoDBClient(): MongoDBClient {
const config: Partial<MongoDBClientConfig> = {
host: mongodbConfig.MONGODB_HOST,
port: mongodbConfig.MONGODB_PORT,
database: mongodbConfig.MONGODB_DATABASE,
username: mongodbConfig.MONGODB_USERNAME,
password: mongodbConfig.MONGODB_PASSWORD,
uri: mongodbConfig.MONGODB_URI
};
return new MongoDBClient(config);
}
/**
* Singleton MongoDB client instance
*/
let defaultClient: MongoDBClient | null = null;
/**
* Get or create the default MongoDB client instance
*/
export function getMongoDBClient(): MongoDBClient {
if (!defaultClient) {
defaultClient = createDefaultMongoDBClient();
}
return defaultClient;
}
/**
* Connect to MongoDB using the default client
*/
export async function connectMongoDB(): Promise<MongoDBClient> {
const client = getMongoDBClient();
if (!client.connected) {
await client.connect();
await client.createIndexes();
}
return client;
}
/**
* Disconnect from MongoDB
*/
export async function disconnectMongoDB(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}
import { mongodbConfig } from '@stock-bot/config';
import { MongoDBClient } from './client';
import type { MongoDBClientConfig, MongoDBConnectionOptions } from './types';
/**
* Factory function to create a MongoDB client instance
*/
export function createMongoDBClient(
config?: Partial<MongoDBClientConfig>,
options?: MongoDBConnectionOptions
): MongoDBClient {
return new MongoDBClient(config, options);
}
/**
* Create a MongoDB client with default configuration
*/
export function createDefaultMongoDBClient(): MongoDBClient {
const config: Partial<MongoDBClientConfig> = {
host: mongodbConfig.MONGODB_HOST,
port: mongodbConfig.MONGODB_PORT,
database: mongodbConfig.MONGODB_DATABASE,
username: mongodbConfig.MONGODB_USERNAME,
password: mongodbConfig.MONGODB_PASSWORD,
uri: mongodbConfig.MONGODB_URI,
};
return new MongoDBClient(config);
}
/**
* Singleton MongoDB client instance
*/
let defaultClient: MongoDBClient | null = null;
/**
* Get or create the default MongoDB client instance
*/
export function getMongoDBClient(): MongoDBClient {
if (!defaultClient) {
defaultClient = createDefaultMongoDBClient();
}
return defaultClient;
}
/**
* Connect to MongoDB using the default client
*/
export async function connectMongoDB(): Promise<MongoDBClient> {
const client = getMongoDBClient();
if (!client.connected) {
await client.connect();
await client.createIndexes();
}
return client;
}
/**
* Disconnect from MongoDB
*/
export async function disconnectMongoDB(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}

View file

@ -1,226 +1,233 @@
import { getLogger } from '@stock-bot/logger';
import type { MongoDBClient } from './client';
import type { MongoDBHealthCheck, MongoDBHealthStatus, MongoDBMetrics } from './types';
/**
* MongoDB Health Monitor
*
* Monitors MongoDB connection health and provides metrics
*/
export class MongoDBHealthMonitor {
private readonly client: MongoDBClient;
private readonly logger: ReturnType<typeof getLogger>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private metrics: MongoDBMetrics;
private lastHealthCheck: MongoDBHealthCheck | null = null;
constructor(client: MongoDBClient) {
this.client = client;
this.logger = getLogger('mongodb-health-monitor');
this.metrics = {
operationsPerSecond: 0,
averageLatency: 0,
errorRate: 0,
connectionPoolUtilization: 0,
documentsProcessed: 0
};
}
/**
* Start health monitoring
*/
start(intervalMs: number = 30000): void {
if (this.healthCheckInterval) {
this.stop();
}
this.logger.info(`Starting MongoDB health monitoring (interval: ${intervalMs}ms)`);
this.healthCheckInterval = setInterval(async () => {
try {
await this.performHealthCheck();
} catch (error) {
this.logger.error('Health check failed:', error);
}
}, intervalMs);
// Perform initial health check
this.performHealthCheck().catch(error => {
this.logger.error('Initial health check failed:', error);
});
}
/**
* Stop health monitoring
*/
stop(): void {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
this.logger.info('Stopped MongoDB health monitoring');
}
}
/**
* Get current health status
*/
async getHealth(): Promise<MongoDBHealthCheck> {
if (!this.lastHealthCheck) {
await this.performHealthCheck();
}
return this.lastHealthCheck!;
}
/**
* Get current metrics
*/
getMetrics(): MongoDBMetrics {
return { ...this.metrics };
}
/**
* Perform a health check
*/
private async performHealthCheck(): Promise<void> {
const startTime = Date.now();
const errors: string[] = [];
let status: MongoDBHealthStatus = 'healthy';
try {
if (!this.client.connected) {
errors.push('MongoDB client not connected');
status = 'unhealthy';
} else {
// Test basic connectivity
const mongoClient = this.client.mongoClient;
const db = this.client.database;
if (!mongoClient || !db) {
errors.push('MongoDB client or database not available');
status = 'unhealthy';
} else {
// Ping the database
await db.admin().ping();
// Get server status for metrics
try {
const serverStatus = await db.admin().serverStatus();
this.updateMetricsFromServerStatus(serverStatus);
// Check connection pool status
const poolStats = this.getConnectionPoolStats(serverStatus);
if (poolStats.utilization > 0.9) {
errors.push('High connection pool utilization');
status = status === 'healthy' ? 'degraded' : status;
}
// Check for high latency
const latency = Date.now() - startTime;
if (latency > 1000) {
errors.push(`High latency: ${latency}ms`);
status = status === 'healthy' ? 'degraded' : status;
}
} catch (statusError) {
errors.push(`Failed to get server status: ${(statusError as Error).message}`);
status = 'degraded';
}
}
}
} catch (error) {
errors.push(`Health check failed: ${(error as Error).message}`);
status = 'unhealthy';
}
const latency = Date.now() - startTime;
// Get connection stats
const connectionStats = this.getConnectionStats();
this.lastHealthCheck = {
status,
timestamp: new Date(),
latency,
connections: connectionStats,
errors: errors.length > 0 ? errors : undefined
};
// Log health status changes
if (status !== 'healthy') {
this.logger.warn(`MongoDB health status: ${status}`, { errors, latency });
} else {
this.logger.debug(`MongoDB health check passed (${latency}ms)`);
}
}
/**
* Update metrics from MongoDB server status
*/
private updateMetricsFromServerStatus(serverStatus: any): void {
try {
const opcounters = serverStatus.opcounters || {};
const connections = serverStatus.connections || {};
const dur = serverStatus.dur || {};
// Calculate operations per second (approximate)
const totalOps = Object.values(opcounters).reduce((sum: number, count: any) => sum + (count || 0), 0);
this.metrics.operationsPerSecond = totalOps;
// Connection pool utilization
if (connections.current && connections.available) {
const total = connections.current + connections.available;
this.metrics.connectionPoolUtilization = connections.current / total;
}
// Average latency (from durability stats if available)
if (dur.timeMS) {
this.metrics.averageLatency = dur.timeMS.dt || 0;
} } catch (error) {
this.logger.debug('Error parsing server status for metrics:', error as any);
}
}
/**
* Get connection pool statistics
*/
private getConnectionPoolStats(serverStatus: any): { utilization: number; active: number; available: number } {
const connections = serverStatus.connections || {};
const active = connections.current || 0;
const available = connections.available || 0;
const total = active + available;
return {
utilization: total > 0 ? active / total : 0,
active,
available
};
}
/**
* Get connection statistics
*/
private getConnectionStats(): { active: number; available: number; total: number } {
// This would ideally come from the MongoDB driver's connection pool
// For now, we'll return estimated values
return {
active: 1,
available: 9,
total: 10
};
}
/**
* Update error rate metric
*/
updateErrorRate(errorCount: number, totalOperations: number): void {
this.metrics.errorRate = totalOperations > 0 ? errorCount / totalOperations : 0;
}
/**
* Update documents processed metric
*/
updateDocumentsProcessed(count: number): void {
this.metrics.documentsProcessed += count;
}
}
import { getLogger } from '@stock-bot/logger';
import type { MongoDBClient } from './client';
import type { MongoDBHealthCheck, MongoDBHealthStatus, MongoDBMetrics } from './types';
/**
* MongoDB Health Monitor
*
* Monitors MongoDB connection health and provides metrics
*/
export class MongoDBHealthMonitor {
private readonly client: MongoDBClient;
private readonly logger: ReturnType<typeof getLogger>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private metrics: MongoDBMetrics;
private lastHealthCheck: MongoDBHealthCheck | null = null;
constructor(client: MongoDBClient) {
this.client = client;
this.logger = getLogger('mongodb-health-monitor');
this.metrics = {
operationsPerSecond: 0,
averageLatency: 0,
errorRate: 0,
connectionPoolUtilization: 0,
documentsProcessed: 0,
};
}
/**
* Start health monitoring
*/
start(intervalMs: number = 30000): void {
if (this.healthCheckInterval) {
this.stop();
}
this.logger.info(`Starting MongoDB health monitoring (interval: ${intervalMs}ms)`);
this.healthCheckInterval = setInterval(async () => {
try {
await this.performHealthCheck();
} catch (error) {
this.logger.error('Health check failed:', error);
}
}, intervalMs);
// Perform initial health check
this.performHealthCheck().catch(error => {
this.logger.error('Initial health check failed:', error);
});
}
/**
* Stop health monitoring
*/
stop(): void {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
this.logger.info('Stopped MongoDB health monitoring');
}
}
/**
* Get current health status
*/
async getHealth(): Promise<MongoDBHealthCheck> {
if (!this.lastHealthCheck) {
await this.performHealthCheck();
}
return this.lastHealthCheck!;
}
/**
* Get current metrics
*/
getMetrics(): MongoDBMetrics {
return { ...this.metrics };
}
/**
* Perform a health check
*/
private async performHealthCheck(): Promise<void> {
const startTime = Date.now();
const errors: string[] = [];
let status: MongoDBHealthStatus = 'healthy';
try {
if (!this.client.connected) {
errors.push('MongoDB client not connected');
status = 'unhealthy';
} else {
// Test basic connectivity
const mongoClient = this.client.mongoClient;
const db = this.client.database;
if (!mongoClient || !db) {
errors.push('MongoDB client or database not available');
status = 'unhealthy';
} else {
// Ping the database
await db.admin().ping();
// Get server status for metrics
try {
const serverStatus = await db.admin().serverStatus();
this.updateMetricsFromServerStatus(serverStatus);
// Check connection pool status
const poolStats = this.getConnectionPoolStats(serverStatus);
if (poolStats.utilization > 0.9) {
errors.push('High connection pool utilization');
status = status === 'healthy' ? 'degraded' : status;
}
// Check for high latency
const latency = Date.now() - startTime;
if (latency > 1000) {
errors.push(`High latency: ${latency}ms`);
status = status === 'healthy' ? 'degraded' : status;
}
} catch (statusError) {
errors.push(`Failed to get server status: ${(statusError as Error).message}`);
status = 'degraded';
}
}
}
} catch (error) {
errors.push(`Health check failed: ${(error as Error).message}`);
status = 'unhealthy';
}
const latency = Date.now() - startTime;
// Get connection stats
const connectionStats = this.getConnectionStats();
this.lastHealthCheck = {
status,
timestamp: new Date(),
latency,
connections: connectionStats,
errors: errors.length > 0 ? errors : undefined,
};
// Log health status changes
if (status !== 'healthy') {
this.logger.warn(`MongoDB health status: ${status}`, { errors, latency });
} else {
this.logger.debug(`MongoDB health check passed (${latency}ms)`);
}
}
/**
* Update metrics from MongoDB server status
*/
private updateMetricsFromServerStatus(serverStatus: any): void {
try {
const opcounters = serverStatus.opcounters || {};
const connections = serverStatus.connections || {};
const dur = serverStatus.dur || {};
// Calculate operations per second (approximate)
const totalOps = Object.values(opcounters).reduce(
(sum: number, count: any) => sum + (count || 0),
0
);
this.metrics.operationsPerSecond = totalOps;
// Connection pool utilization
if (connections.current && connections.available) {
const total = connections.current + connections.available;
this.metrics.connectionPoolUtilization = connections.current / total;
}
// Average latency (from durability stats if available)
if (dur.timeMS) {
this.metrics.averageLatency = dur.timeMS.dt || 0;
}
} catch (error) {
this.logger.debug('Error parsing server status for metrics:', error as any);
}
}
/**
* Get connection pool statistics
*/
private getConnectionPoolStats(serverStatus: any): {
utilization: number;
active: number;
available: number;
} {
const connections = serverStatus.connections || {};
const active = connections.current || 0;
const available = connections.available || 0;
const total = active + available;
return {
utilization: total > 0 ? active / total : 0,
active,
available,
};
}
/**
* Get connection statistics
*/
private getConnectionStats(): { active: number; available: number; total: number } {
// This would ideally come from the MongoDB driver's connection pool
// For now, we'll return estimated values
return {
active: 1,
available: 9,
total: 10,
};
}
/**
* Update error rate metric
*/
updateErrorRate(errorCount: number, totalOperations: number): void {
this.metrics.errorRate = totalOperations > 0 ? errorCount / totalOperations : 0;
}
/**
* Update documents processed metric
*/
updateDocumentsProcessed(count: number): void {
this.metrics.documentsProcessed += count;
}
}

View file

@ -1,40 +1,40 @@
/**
* MongoDB Client Library for Stock Bot
*
* Provides type-safe MongoDB access for document storage, sentiment data,
* and raw content processing.
*/
export { MongoDBClient } from './client';
export { MongoDBHealthMonitor } from './health';
export { MongoDBTransactionManager } from './transactions';
export { MongoDBAggregationBuilder } from './aggregation';
// Types
export type {
MongoDBClientConfig,
MongoDBConnectionOptions,
MongoDBHealthStatus,
MongoDBMetrics,
CollectionNames,
DocumentBase,
SentimentData,
RawDocument,
NewsArticle,
SecFiling,
EarningsTranscript,
AnalystReport
} from './types';
// Schemas
export {
sentimentDataSchema,
rawDocumentSchema,
newsArticleSchema,
secFilingSchema,
earningsTranscriptSchema,
analystReportSchema
} from './schemas';
// Utils
export { createMongoDBClient } from './factory';
/**
* MongoDB Client Library for Stock Bot
*
* Provides type-safe MongoDB access for document storage, sentiment data,
* and raw content processing.
*/
export { MongoDBClient } from './client';
export { MongoDBHealthMonitor } from './health';
export { MongoDBTransactionManager } from './transactions';
export { MongoDBAggregationBuilder } from './aggregation';
// Types
export type {
MongoDBClientConfig,
MongoDBConnectionOptions,
MongoDBHealthStatus,
MongoDBMetrics,
CollectionNames,
DocumentBase,
SentimentData,
RawDocument,
NewsArticle,
SecFiling,
EarningsTranscript,
AnalystReport,
} from './types';
// Schemas
export {
sentimentDataSchema,
rawDocumentSchema,
newsArticleSchema,
secFilingSchema,
earningsTranscriptSchema,
analystReportSchema,
} from './schemas';
// Utils
export { createMongoDBClient } from './factory';

View file

@ -1,132 +1,146 @@
import * as yup from 'yup';
/**
* Yup Schemas for MongoDB Document Validation
*/
// Base schema for all documents
export const documentBaseSchema = yup.object({
_id: yup.mixed().optional(),
created_at: yup.date().required(),
updated_at: yup.date().required(),
source: yup.string().required(),
metadata: yup.object().optional(),
});
// Sentiment Data Schema
export const sentimentDataSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
sentiment_score: yup.number().min(-1).max(1).required(),
sentiment_label: yup.string().oneOf(['positive', 'negative', 'neutral']).required(),
confidence: yup.number().min(0).max(1).required(),
text: yup.string().min(1).required(),
source_type: yup.string().oneOf(['reddit', 'twitter', 'news', 'forums']).required(),
source_id: yup.string().required(),
timestamp: yup.date().required(),
processed_at: yup.date().required(),
language: yup.string().default('en'),
keywords: yup.array(yup.string()).required(),
entities: yup.array(yup.object({
name: yup.string().required(),
type: yup.string().required(),
confidence: yup.number().min(0).max(1).required(),
})).required(),
});
// Raw Document Schema
export const rawDocumentSchema = documentBaseSchema.shape({
document_type: yup.string().oneOf(['html', 'pdf', 'text', 'json', 'xml']).required(),
content: yup.string().required(),
content_hash: yup.string().required(),
url: yup.string().url().optional(),
title: yup.string().optional(),
author: yup.string().optional(),
published_date: yup.date().optional(),
extracted_text: yup.string().optional(),
processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(),
size_bytes: yup.number().positive().required(),
language: yup.string().optional(),
});
// News Article Schema
export const newsArticleSchema = documentBaseSchema.shape({
headline: yup.string().min(1).required(),
content: yup.string().min(1).required(),
summary: yup.string().optional(),
author: yup.string().required(),
publication: yup.string().required(),
published_date: yup.date().required(),
url: yup.string().url().required(),
symbols: yup.array(yup.string()).required(),
categories: yup.array(yup.string()).required(),
sentiment_score: yup.number().min(-1).max(1).optional(),
relevance_score: yup.number().min(0).max(1).optional(),
image_url: yup.string().url().optional(),
tags: yup.array(yup.string()).required(),
});
// SEC Filing Schema
export const secFilingSchema = documentBaseSchema.shape({
cik: yup.string().required(),
accession_number: yup.string().required(),
filing_type: yup.string().required(),
company_name: yup.string().required(),
symbols: yup.array(yup.string()).required(),
filing_date: yup.date().required(),
period_end_date: yup.date().required(),
url: yup.string().url().required(),
content: yup.string().required(),
extracted_data: yup.object().optional(),
financial_statements: yup.array(yup.object({
statement_type: yup.string().required(),
data: yup.object().required(),
})).optional(),
processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(),
});
// Earnings Transcript Schema
export const earningsTranscriptSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
company_name: yup.string().required(),
quarter: yup.string().required(),
year: yup.number().min(2000).max(3000).required(),
call_date: yup.date().required(),
transcript: yup.string().required(),
participants: yup.array(yup.object({
name: yup.string().required(),
title: yup.string().required(),
type: yup.string().oneOf(['executive', 'analyst']).required(),
})).required(),
key_topics: yup.array(yup.string()).required(),
sentiment_analysis: yup.object({
overall_sentiment: yup.number().min(-1).max(1).required(),
topic_sentiments: yup.object().required(),
}).optional(),
financial_highlights: yup.object().optional(),
});
// Analyst Report Schema
export const analystReportSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
analyst_firm: yup.string().required(),
analyst_name: yup.string().required(),
report_title: yup.string().required(),
report_date: yup.date().required(),
rating: yup.string().oneOf(['buy', 'hold', 'sell', 'strong_buy', 'strong_sell']).required(),
price_target: yup.number().positive().optional(),
previous_rating: yup.string().optional(),
content: yup.string().required(),
summary: yup.string().required(),
key_points: yup.array(yup.string()).required(),
financial_projections: yup.object().optional(),
});
// Schema mapping for collections
export const schemaMap = {
sentiment_data: sentimentDataSchema,
raw_documents: rawDocumentSchema,
news_articles: newsArticleSchema,
sec_filings: secFilingSchema,
earnings_transcripts: earningsTranscriptSchema,
analyst_reports: analystReportSchema,
} as const;
import * as yup from 'yup';
/**
* Yup Schemas for MongoDB Document Validation
*/
// Base schema for all documents
export const documentBaseSchema = yup.object({
_id: yup.mixed().optional(),
created_at: yup.date().required(),
updated_at: yup.date().required(),
source: yup.string().required(),
metadata: yup.object().optional(),
});
// Sentiment Data Schema
export const sentimentDataSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
sentiment_score: yup.number().min(-1).max(1).required(),
sentiment_label: yup.string().oneOf(['positive', 'negative', 'neutral']).required(),
confidence: yup.number().min(0).max(1).required(),
text: yup.string().min(1).required(),
source_type: yup.string().oneOf(['reddit', 'twitter', 'news', 'forums']).required(),
source_id: yup.string().required(),
timestamp: yup.date().required(),
processed_at: yup.date().required(),
language: yup.string().default('en'),
keywords: yup.array(yup.string()).required(),
entities: yup
.array(
yup.object({
name: yup.string().required(),
type: yup.string().required(),
confidence: yup.number().min(0).max(1).required(),
})
)
.required(),
});
// Raw Document Schema
export const rawDocumentSchema = documentBaseSchema.shape({
document_type: yup.string().oneOf(['html', 'pdf', 'text', 'json', 'xml']).required(),
content: yup.string().required(),
content_hash: yup.string().required(),
url: yup.string().url().optional(),
title: yup.string().optional(),
author: yup.string().optional(),
published_date: yup.date().optional(),
extracted_text: yup.string().optional(),
processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(),
size_bytes: yup.number().positive().required(),
language: yup.string().optional(),
});
// News Article Schema
export const newsArticleSchema = documentBaseSchema.shape({
headline: yup.string().min(1).required(),
content: yup.string().min(1).required(),
summary: yup.string().optional(),
author: yup.string().required(),
publication: yup.string().required(),
published_date: yup.date().required(),
url: yup.string().url().required(),
symbols: yup.array(yup.string()).required(),
categories: yup.array(yup.string()).required(),
sentiment_score: yup.number().min(-1).max(1).optional(),
relevance_score: yup.number().min(0).max(1).optional(),
image_url: yup.string().url().optional(),
tags: yup.array(yup.string()).required(),
});
// SEC Filing Schema
export const secFilingSchema = documentBaseSchema.shape({
cik: yup.string().required(),
accession_number: yup.string().required(),
filing_type: yup.string().required(),
company_name: yup.string().required(),
symbols: yup.array(yup.string()).required(),
filing_date: yup.date().required(),
period_end_date: yup.date().required(),
url: yup.string().url().required(),
content: yup.string().required(),
extracted_data: yup.object().optional(),
financial_statements: yup
.array(
yup.object({
statement_type: yup.string().required(),
data: yup.object().required(),
})
)
.optional(),
processing_status: yup.string().oneOf(['pending', 'processed', 'failed']).required(),
});
// Earnings Transcript Schema
export const earningsTranscriptSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
company_name: yup.string().required(),
quarter: yup.string().required(),
year: yup.number().min(2000).max(3000).required(),
call_date: yup.date().required(),
transcript: yup.string().required(),
participants: yup
.array(
yup.object({
name: yup.string().required(),
title: yup.string().required(),
type: yup.string().oneOf(['executive', 'analyst']).required(),
})
)
.required(),
key_topics: yup.array(yup.string()).required(),
sentiment_analysis: yup
.object({
overall_sentiment: yup.number().min(-1).max(1).required(),
topic_sentiments: yup.object().required(),
})
.optional(),
financial_highlights: yup.object().optional(),
});
// Analyst Report Schema
export const analystReportSchema = documentBaseSchema.shape({
symbol: yup.string().min(1).max(10).required(),
analyst_firm: yup.string().required(),
analyst_name: yup.string().required(),
report_title: yup.string().required(),
report_date: yup.date().required(),
rating: yup.string().oneOf(['buy', 'hold', 'sell', 'strong_buy', 'strong_sell']).required(),
price_target: yup.number().positive().optional(),
previous_rating: yup.string().optional(),
content: yup.string().required(),
summary: yup.string().required(),
key_points: yup.array(yup.string()).required(),
financial_projections: yup.object().optional(),
});
// Schema mapping for collections
export const schemaMap = {
sentiment_data: sentimentDataSchema,
raw_documents: rawDocumentSchema,
news_articles: newsArticleSchema,
sec_filings: secFilingSchema,
earnings_transcripts: earningsTranscriptSchema,
analyst_reports: analystReportSchema,
} as const;

View file

@ -1,238 +1,238 @@
import { getLogger } from '@stock-bot/logger';
import type { MongoDBClient } from './client';
import type { CollectionNames, DocumentBase } from './types';
import type { WithId, OptionalUnlessRequiredId } from 'mongodb';
/**
* MongoDB Transaction Manager
*
* Provides transaction support for multi-document operations
*/
export class MongoDBTransactionManager {
private readonly client: MongoDBClient;
private readonly logger: ReturnType<typeof getLogger>;
constructor(client: MongoDBClient) {
this.client = client;
this.logger = getLogger('mongodb-transaction-manager');
}
/**
* Execute operations within a transaction
*/
async withTransaction<T>(
operations: (session: any) => Promise<T>,
options?: {
readPreference?: string;
readConcern?: string;
writeConcern?: any;
maxCommitTimeMS?: number;
}
): Promise<T> {
const mongoClient = this.client.mongoClient;
if (!mongoClient) {
throw new Error('MongoDB client not connected');
}
const session = mongoClient.startSession();
try {
this.logger.debug('Starting MongoDB transaction');
const result = await session.withTransaction(
async () => {
return await operations(session);
}, {
readPreference: options?.readPreference as any,
readConcern: { level: options?.readConcern || 'majority' } as any,
writeConcern: options?.writeConcern || { w: 'majority' },
maxCommitTimeMS: options?.maxCommitTimeMS || 10000
}
);
this.logger.debug('MongoDB transaction completed successfully');
return result;
} catch (error) {
this.logger.error('MongoDB transaction failed:', error);
throw error;
} finally {
await session.endSession();
}
}
/**
* Batch insert documents across collections within a transaction
*/
async batchInsert(
operations: Array<{
collection: CollectionNames;
documents: DocumentBase[];
}>,
options?: { ordered?: boolean; bypassDocumentValidation?: boolean }
): Promise<void> {
await this.withTransaction(async (session) => {
for (const operation of operations) {
const collection = this.client.getCollection(operation.collection);
// Add timestamps to all documents
const now = new Date();
const documentsWithTimestamps = operation.documents.map(doc => ({
...doc,
created_at: doc.created_at || now,
updated_at: now
}));
await collection.insertMany(documentsWithTimestamps, {
session,
ordered: options?.ordered ?? true,
bypassDocumentValidation: options?.bypassDocumentValidation ?? false
});
this.logger.debug(`Inserted ${documentsWithTimestamps.length} documents into ${operation.collection}`);
}
});
}
/**
* Batch update documents across collections within a transaction
*/
async batchUpdate(
operations: Array<{
collection: CollectionNames;
filter: any;
update: any;
options?: any;
}>
): Promise<void> {
await this.withTransaction(async (session) => {
const results = [];
for (const operation of operations) {
const collection = this.client.getCollection(operation.collection);
// Add updated timestamp
const updateWithTimestamp = {
...operation.update,
$set: {
...operation.update.$set,
updated_at: new Date()
}
};
const result = await collection.updateMany(
operation.filter,
updateWithTimestamp,
{
session,
...operation.options
}
);
results.push(result);
this.logger.debug(`Updated ${result.modifiedCount} documents in ${operation.collection}`);
}
return results;
});
}
/**
* Move documents between collections within a transaction
*/
async moveDocuments<T extends DocumentBase>(
fromCollection: CollectionNames,
toCollection: CollectionNames,
filter: any,
transform?: (doc: T) => T
): Promise<number> {
return await this.withTransaction(async (session) => {
const sourceCollection = this.client.getCollection<T>(fromCollection);
const targetCollection = this.client.getCollection<T>(toCollection);
// Find documents to move
const documents = await sourceCollection.find(filter, { session }).toArray();
if (documents.length === 0) {
return 0;
} // Transform documents if needed
const documentsToInsert = transform
? documents.map((doc: WithId<T>) => transform(doc as T))
: documents;
// Add updated timestamp
const now = new Date();
documentsToInsert.forEach(doc => {
doc.updated_at = now;
}); // Insert into target collection
await targetCollection.insertMany(documentsToInsert as OptionalUnlessRequiredId<T>[], { session });
// Remove from source collection
const deleteResult = await sourceCollection.deleteMany(filter, { session });
this.logger.info(`Moved ${documents.length} documents from ${fromCollection} to ${toCollection}`);
return deleteResult.deletedCount || 0;
});
}
/**
* Archive old documents within a transaction
*/
async archiveDocuments(
sourceCollection: CollectionNames,
archiveCollection: CollectionNames,
cutoffDate: Date,
batchSize: number = 1000
): Promise<number> {
let totalArchived = 0;
while (true) {
const batchArchived = await this.withTransaction(async (session) => {
const collection = this.client.getCollection(sourceCollection);
const archiveCol = this.client.getCollection(archiveCollection);
// Find old documents
const documents = await collection.find(
{ created_at: { $lt: cutoffDate } },
{ limit: batchSize, session }
).toArray();
if (documents.length === 0) {
return 0;
}
// Add archive metadata
const now = new Date();
const documentsToArchive = documents.map(doc => ({
...doc,
archived_at: now,
archived_from: sourceCollection
}));
// Insert into archive collection
await archiveCol.insertMany(documentsToArchive, { session });
// Remove from source collection
const ids = documents.map(doc => doc._id);
const deleteResult = await collection.deleteMany(
{ _id: { $in: ids } },
{ session }
);
return deleteResult.deletedCount || 0;
});
totalArchived += batchArchived;
if (batchArchived === 0) {
break;
}
this.logger.debug(`Archived batch of ${batchArchived} documents`);
}
this.logger.info(`Archived ${totalArchived} documents from ${sourceCollection} to ${archiveCollection}`);
return totalArchived;
}
}
import type { OptionalUnlessRequiredId, WithId } from 'mongodb';
import { getLogger } from '@stock-bot/logger';
import type { MongoDBClient } from './client';
import type { CollectionNames, DocumentBase } from './types';
/**
* MongoDB Transaction Manager
*
* Provides transaction support for multi-document operations
*/
export class MongoDBTransactionManager {
private readonly client: MongoDBClient;
private readonly logger: ReturnType<typeof getLogger>;
constructor(client: MongoDBClient) {
this.client = client;
this.logger = getLogger('mongodb-transaction-manager');
}
/**
* Execute operations within a transaction
*/
async withTransaction<T>(
operations: (session: any) => Promise<T>,
options?: {
readPreference?: string;
readConcern?: string;
writeConcern?: any;
maxCommitTimeMS?: number;
}
): Promise<T> {
const mongoClient = this.client.mongoClient;
if (!mongoClient) {
throw new Error('MongoDB client not connected');
}
const session = mongoClient.startSession();
try {
this.logger.debug('Starting MongoDB transaction');
const result = await session.withTransaction(
async () => {
return await operations(session);
},
{
readPreference: options?.readPreference as any,
readConcern: { level: options?.readConcern || 'majority' } as any,
writeConcern: options?.writeConcern || { w: 'majority' },
maxCommitTimeMS: options?.maxCommitTimeMS || 10000,
}
);
this.logger.debug('MongoDB transaction completed successfully');
return result;
} catch (error) {
this.logger.error('MongoDB transaction failed:', error);
throw error;
} finally {
await session.endSession();
}
}
/**
* Batch insert documents across collections within a transaction
*/
async batchInsert(
operations: Array<{
collection: CollectionNames;
documents: DocumentBase[];
}>,
options?: { ordered?: boolean; bypassDocumentValidation?: boolean }
): Promise<void> {
await this.withTransaction(async session => {
for (const operation of operations) {
const collection = this.client.getCollection(operation.collection);
// Add timestamps to all documents
const now = new Date();
const documentsWithTimestamps = operation.documents.map(doc => ({
...doc,
created_at: doc.created_at || now,
updated_at: now,
}));
await collection.insertMany(documentsWithTimestamps, {
session,
ordered: options?.ordered ?? true,
bypassDocumentValidation: options?.bypassDocumentValidation ?? false,
});
this.logger.debug(
`Inserted ${documentsWithTimestamps.length} documents into ${operation.collection}`
);
}
});
}
/**
* Batch update documents across collections within a transaction
*/
async batchUpdate(
operations: Array<{
collection: CollectionNames;
filter: any;
update: any;
options?: any;
}>
): Promise<void> {
await this.withTransaction(async session => {
const results = [];
for (const operation of operations) {
const collection = this.client.getCollection(operation.collection);
// Add updated timestamp
const updateWithTimestamp = {
...operation.update,
$set: {
...operation.update.$set,
updated_at: new Date(),
},
};
const result = await collection.updateMany(operation.filter, updateWithTimestamp, {
session,
...operation.options,
});
results.push(result);
this.logger.debug(`Updated ${result.modifiedCount} documents in ${operation.collection}`);
}
return results;
});
}
/**
* Move documents between collections within a transaction
*/
async moveDocuments<T extends DocumentBase>(
fromCollection: CollectionNames,
toCollection: CollectionNames,
filter: any,
transform?: (doc: T) => T
): Promise<number> {
return await this.withTransaction(async session => {
const sourceCollection = this.client.getCollection<T>(fromCollection);
const targetCollection = this.client.getCollection<T>(toCollection);
// Find documents to move
const documents = await sourceCollection.find(filter, { session }).toArray();
if (documents.length === 0) {
return 0;
} // Transform documents if needed
const documentsToInsert = transform
? documents.map((doc: WithId<T>) => transform(doc as T))
: documents;
// Add updated timestamp
const now = new Date();
documentsToInsert.forEach(doc => {
doc.updated_at = now;
}); // Insert into target collection
await targetCollection.insertMany(documentsToInsert as OptionalUnlessRequiredId<T>[], {
session,
});
// Remove from source collection
const deleteResult = await sourceCollection.deleteMany(filter, { session });
this.logger.info(
`Moved ${documents.length} documents from ${fromCollection} to ${toCollection}`
);
return deleteResult.deletedCount || 0;
});
}
/**
* Archive old documents within a transaction
*/
async archiveDocuments(
sourceCollection: CollectionNames,
archiveCollection: CollectionNames,
cutoffDate: Date,
batchSize: number = 1000
): Promise<number> {
let totalArchived = 0;
while (true) {
const batchArchived = await this.withTransaction(async session => {
const collection = this.client.getCollection(sourceCollection);
const archiveCol = this.client.getCollection(archiveCollection);
// Find old documents
const documents = await collection
.find({ created_at: { $lt: cutoffDate } }, { limit: batchSize, session })
.toArray();
if (documents.length === 0) {
return 0;
}
// Add archive metadata
const now = new Date();
const documentsToArchive = documents.map(doc => ({
...doc,
archived_at: now,
archived_from: sourceCollection,
}));
// Insert into archive collection
await archiveCol.insertMany(documentsToArchive, { session });
// Remove from source collection
const ids = documents.map(doc => doc._id);
const deleteResult = await collection.deleteMany({ _id: { $in: ids } }, { session });
return deleteResult.deletedCount || 0;
});
totalArchived += batchArchived;
if (batchArchived === 0) {
break;
}
this.logger.debug(`Archived batch of ${batchArchived} documents`);
}
this.logger.info(
`Archived ${totalArchived} documents from ${sourceCollection} to ${archiveCollection}`
);
return totalArchived;
}
}

View file

@ -1,215 +1,215 @@
import * as yup from 'yup';
import type { ObjectId } from 'mongodb';
/**
* MongoDB Client Configuration
*/
export interface MongoDBClientConfig {
host: string;
port: number;
database: string;
username?: string;
password?: string;
authSource?: string;
uri?: string;
poolSettings?: {
maxPoolSize: number;
minPoolSize: number;
maxIdleTime: number;
};
timeouts?: {
connectTimeout: number;
socketTimeout: number;
serverSelectionTimeout: number;
};
tls?: {
enabled: boolean;
insecure: boolean;
caFile?: string;
};
options?: {
retryWrites: boolean;
journal: boolean;
readPreference: 'primary' | 'primaryPreferred' | 'secondary' | 'secondaryPreferred' | 'nearest';
writeConcern: string;
};
}
/**
* MongoDB Connection Options
*/
export interface MongoDBConnectionOptions {
retryAttempts?: number;
retryDelay?: number;
healthCheckInterval?: number;
}
/**
* Health Status Types
*/
export type MongoDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy';
export interface MongoDBHealthCheck {
status: MongoDBHealthStatus;
timestamp: Date;
latency: number;
connections: {
active: number;
available: number;
total: number;
};
errors?: string[];
}
export interface MongoDBMetrics {
operationsPerSecond: number;
averageLatency: number;
errorRate: number;
connectionPoolUtilization: number;
documentsProcessed: number;
}
/**
* Collection Names
*/
export type CollectionNames =
| 'sentiment_data'
| 'raw_documents'
| 'news_articles'
| 'sec_filings'
| 'earnings_transcripts'
| 'analyst_reports'
| 'social_media_posts'
| 'market_events'
| 'economic_indicators';
/**
* Base Document Interface
*/
export interface DocumentBase {
_id?: ObjectId;
created_at: Date;
updated_at: Date;
source: string;
metadata?: Record<string, any>;
}
/**
* Sentiment Data Document
*/
export interface SentimentData extends DocumentBase {
symbol: string;
sentiment_score: number;
sentiment_label: 'positive' | 'negative' | 'neutral';
confidence: number;
text: string;
source_type: 'reddit' | 'twitter' | 'news' | 'forums';
source_id: string;
timestamp: Date;
processed_at: Date;
language: string;
keywords: string[];
entities: Array<{
name: string;
type: string;
confidence: number;
}>;
}
/**
* Raw Document
*/
export interface RawDocument extends DocumentBase {
document_type: 'html' | 'pdf' | 'text' | 'json' | 'xml';
content: string;
content_hash: string;
url?: string;
title?: string;
author?: string;
published_date?: Date;
extracted_text?: string;
processing_status: 'pending' | 'processed' | 'failed';
size_bytes: number;
language?: string;
}
/**
* News Article
*/
export interface NewsArticle extends DocumentBase {
headline: string;
content: string;
summary?: string;
author: string;
publication: string;
published_date: Date;
url: string;
symbols: string[];
categories: string[];
sentiment_score?: number;
relevance_score?: number;
image_url?: string;
tags: string[];
}
/**
* SEC Filing
*/
export interface SecFiling extends DocumentBase {
cik: string;
accession_number: string;
filing_type: string;
company_name: string;
symbols: string[];
filing_date: Date;
period_end_date: Date;
url: string;
content: string;
extracted_data?: Record<string, any>;
financial_statements?: Array<{
statement_type: string;
data: Record<string, number>;
}>;
processing_status: 'pending' | 'processed' | 'failed';
}
/**
* Earnings Transcript
*/
export interface EarningsTranscript extends DocumentBase {
symbol: string;
company_name: string;
quarter: string;
year: number;
call_date: Date;
transcript: string;
participants: Array<{
name: string;
title: string;
type: 'executive' | 'analyst';
}>;
key_topics: string[];
sentiment_analysis?: {
overall_sentiment: number;
topic_sentiments: Record<string, number>;
};
financial_highlights?: Record<string, number>;
}
/**
* Analyst Report
*/
export interface AnalystReport extends DocumentBase {
symbol: string;
analyst_firm: string;
analyst_name: string;
report_title: string;
report_date: Date;
rating: 'buy' | 'hold' | 'sell' | 'strong_buy' | 'strong_sell';
price_target?: number;
previous_rating?: string;
content: string;
summary: string;
key_points: string[];
financial_projections?: Record<string, number>;
}
import type { ObjectId } from 'mongodb';
import * as yup from 'yup';
/**
* MongoDB Client Configuration
*/
export interface MongoDBClientConfig {
host: string;
port: number;
database: string;
username?: string;
password?: string;
authSource?: string;
uri?: string;
poolSettings?: {
maxPoolSize: number;
minPoolSize: number;
maxIdleTime: number;
};
timeouts?: {
connectTimeout: number;
socketTimeout: number;
serverSelectionTimeout: number;
};
tls?: {
enabled: boolean;
insecure: boolean;
caFile?: string;
};
options?: {
retryWrites: boolean;
journal: boolean;
readPreference: 'primary' | 'primaryPreferred' | 'secondary' | 'secondaryPreferred' | 'nearest';
writeConcern: string;
};
}
/**
* MongoDB Connection Options
*/
export interface MongoDBConnectionOptions {
retryAttempts?: number;
retryDelay?: number;
healthCheckInterval?: number;
}
/**
* Health Status Types
*/
export type MongoDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy';
export interface MongoDBHealthCheck {
status: MongoDBHealthStatus;
timestamp: Date;
latency: number;
connections: {
active: number;
available: number;
total: number;
};
errors?: string[];
}
export interface MongoDBMetrics {
operationsPerSecond: number;
averageLatency: number;
errorRate: number;
connectionPoolUtilization: number;
documentsProcessed: number;
}
/**
* Collection Names
*/
export type CollectionNames =
| 'sentiment_data'
| 'raw_documents'
| 'news_articles'
| 'sec_filings'
| 'earnings_transcripts'
| 'analyst_reports'
| 'social_media_posts'
| 'market_events'
| 'economic_indicators';
/**
* Base Document Interface
*/
export interface DocumentBase {
_id?: ObjectId;
created_at: Date;
updated_at: Date;
source: string;
metadata?: Record<string, any>;
}
/**
* Sentiment Data Document
*/
export interface SentimentData extends DocumentBase {
symbol: string;
sentiment_score: number;
sentiment_label: 'positive' | 'negative' | 'neutral';
confidence: number;
text: string;
source_type: 'reddit' | 'twitter' | 'news' | 'forums';
source_id: string;
timestamp: Date;
processed_at: Date;
language: string;
keywords: string[];
entities: Array<{
name: string;
type: string;
confidence: number;
}>;
}
/**
* Raw Document
*/
export interface RawDocument extends DocumentBase {
document_type: 'html' | 'pdf' | 'text' | 'json' | 'xml';
content: string;
content_hash: string;
url?: string;
title?: string;
author?: string;
published_date?: Date;
extracted_text?: string;
processing_status: 'pending' | 'processed' | 'failed';
size_bytes: number;
language?: string;
}
/**
* News Article
*/
export interface NewsArticle extends DocumentBase {
headline: string;
content: string;
summary?: string;
author: string;
publication: string;
published_date: Date;
url: string;
symbols: string[];
categories: string[];
sentiment_score?: number;
relevance_score?: number;
image_url?: string;
tags: string[];
}
/**
* SEC Filing
*/
export interface SecFiling extends DocumentBase {
cik: string;
accession_number: string;
filing_type: string;
company_name: string;
symbols: string[];
filing_date: Date;
period_end_date: Date;
url: string;
content: string;
extracted_data?: Record<string, any>;
financial_statements?: Array<{
statement_type: string;
data: Record<string, number>;
}>;
processing_status: 'pending' | 'processed' | 'failed';
}
/**
* Earnings Transcript
*/
export interface EarningsTranscript extends DocumentBase {
symbol: string;
company_name: string;
quarter: string;
year: number;
call_date: Date;
transcript: string;
participants: Array<{
name: string;
title: string;
type: 'executive' | 'analyst';
}>;
key_topics: string[];
sentiment_analysis?: {
overall_sentiment: number;
topic_sentiments: Record<string, number>;
};
financial_highlights?: Record<string, number>;
}
/**
* Analyst Report
*/
export interface AnalystReport extends DocumentBase {
symbol: string;
analyst_firm: string;
analyst_name: string;
report_title: string;
report_date: Date;
rating: 'buy' | 'hold' | 'sell' | 'strong_buy' | 'strong_sell';
price_target?: number;
previous_rating?: string;
content: string;
summary: string;
key_points: string[];
financial_projections?: Record<string, number>;
}

View file

@ -1,339 +1,348 @@
import { Pool, PoolClient, QueryResult as PgQueryResult, QueryResultRow } from 'pg';
import { postgresConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import type {
PostgreSQLClientConfig,
PostgreSQLConnectionOptions,
QueryResult,
TransactionCallback
} from './types';
import { PostgreSQLHealthMonitor } from './health';
import { PostgreSQLQueryBuilder } from './query-builder';
import { PostgreSQLTransactionManager } from './transactions';
/**
* PostgreSQL Client for Stock Bot
*
* Provides type-safe access to PostgreSQL with connection pooling,
* health monitoring, and transaction support.
*/
export class PostgreSQLClient {
private pool: Pool | null = null;
private readonly config: PostgreSQLClientConfig;
private readonly options: PostgreSQLConnectionOptions;
private readonly logger: ReturnType<typeof getLogger>;
private readonly healthMonitor: PostgreSQLHealthMonitor;
private readonly transactionManager: PostgreSQLTransactionManager;
private isConnected = false;
constructor(
config?: Partial<PostgreSQLClientConfig>,
options?: PostgreSQLConnectionOptions
) {
this.config = this.buildConfig(config);
this.options = {
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options
};
this.logger = getLogger('postgres-client');
this.healthMonitor = new PostgreSQLHealthMonitor(this);
this.transactionManager = new PostgreSQLTransactionManager(this);
}
/**
* Connect to PostgreSQL
*/
async connect(): Promise<void> {
if (this.isConnected && this.pool) {
return;
}
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(`Connecting to PostgreSQL (attempt ${attempt}/${this.options.retryAttempts})...`);
this.pool = new Pool(this.buildPoolConfig());
// Test the connection
const client = await this.pool.connect();
await client.query('SELECT 1');
client.release();
this.isConnected = true;
this.logger.info('Successfully connected to PostgreSQL');
// Start health monitoring
this.healthMonitor.start();
// Setup error handlers
this.setupErrorHandlers();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`PostgreSQL connection attempt ${attempt} failed:`, error);
if (this.pool) {
await this.pool.end();
this.pool = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(`Failed to connect to PostgreSQL after ${this.options.retryAttempts} attempts: ${lastError?.message}`);
}
/**
* Disconnect from PostgreSQL
*/
async disconnect(): Promise<void> {
if (!this.pool) {
return;
}
try {
this.healthMonitor.stop();
await this.pool.end();
this.isConnected = false;
this.pool = null;
this.logger.info('Disconnected from PostgreSQL');
} catch (error) {
this.logger.error('Error disconnecting from PostgreSQL:', error);
throw error;
}
}
/**
* Execute a query
*/
async query<T extends QueryResultRow = any>(text: string, params?: any[]): Promise<QueryResult<T>> {
if (!this.pool) {
throw new Error('PostgreSQL client not connected');
}
const startTime = Date.now();
try {
const result = await this.pool.query<T>(text, params);
const executionTime = Date.now() - startTime;
this.logger.debug(`Query executed in ${executionTime}ms`, {
query: text.substring(0, 100),
params: params?.length
});
return {
...result,
executionTime
} as QueryResult<T>;
} catch (error) {
const executionTime = Date.now() - startTime;
this.logger.error(`Query failed after ${executionTime}ms:`, {
error,
query: text,
params
});
throw error;
}
}
/**
* Execute multiple queries in a transaction
*/
async transaction<T>(callback: TransactionCallback<T>): Promise<T> {
return await this.transactionManager.execute(callback);
}
/**
* Get a query builder instance
*/
queryBuilder(): PostgreSQLQueryBuilder {
return new PostgreSQLQueryBuilder(this);
}
/**
* Create a new query builder with SELECT
*/
select(columns: string | string[] = '*'): PostgreSQLQueryBuilder {
return this.queryBuilder().select(columns);
}
/**
* Create a new query builder with INSERT
*/
insert(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().insert(table);
}
/**
* Create a new query builder with UPDATE
*/
update(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().update(table);
}
/**
* Create a new query builder with DELETE
*/
delete(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().delete(table);
}
/**
* Execute a stored procedure or function
*/
async callFunction<T extends QueryResultRow = any>(functionName: string, params?: any[]): Promise<QueryResult<T>> {
const placeholders = params ? params.map((_, i) => `$${i + 1}`).join(', ') : '';
const query = `SELECT * FROM ${functionName}(${placeholders})`;
return await this.query<T>(query, params);
}
/**
* Check if a table exists
*/
async tableExists(tableName: string, schemaName: string = 'public'): Promise<boolean> {
const result = await this.query(
`SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = $1 AND table_name = $2
)`,
[schemaName, tableName]
);
return result.rows[0].exists;
}
/**
* Get table schema information
*/
async getTableSchema(tableName: string, schemaName: string = 'public'): Promise<any[]> {
const result = await this.query(
`SELECT
column_name,
data_type,
is_nullable,
column_default,
character_maximum_length
FROM information_schema.columns
WHERE table_schema = $1 AND table_name = $2
ORDER BY ordinal_position`,
[schemaName, tableName]
);
return result.rows;
}
/**
* Execute EXPLAIN for query analysis
*/
async explain(query: string, params?: any[]): Promise<any[]> {
const explainQuery = `EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON) ${query}`;
const result = await this.query(explainQuery, params);
return result.rows[0]['QUERY PLAN'];
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
const result = await this.query(`
SELECT
(SELECT count(*) FROM pg_stat_activity WHERE state = 'active') as active_connections,
(SELECT count(*) FROM pg_stat_activity WHERE state = 'idle') as idle_connections,
(SELECT setting FROM pg_settings WHERE name = 'max_connections') as max_connections,
pg_size_pretty(pg_database_size(current_database())) as database_size
`);
return result.rows[0];
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.pool;
}
/**
* Get the underlying connection pool
*/
get connectionPool(): Pool | null {
return this.pool;
}
private buildConfig(config?: Partial<PostgreSQLClientConfig>): PostgreSQLClientConfig {
return {
host: config?.host || postgresConfig.POSTGRES_HOST,
port: config?.port || postgresConfig.POSTGRES_PORT,
database: config?.database || postgresConfig.POSTGRES_DATABASE,
username: config?.username || postgresConfig.POSTGRES_USERNAME,
password: config?.password || postgresConfig.POSTGRES_PASSWORD,
poolSettings: {
min: postgresConfig.POSTGRES_POOL_MIN,
max: postgresConfig.POSTGRES_POOL_MAX,
idleTimeoutMillis: postgresConfig.POSTGRES_POOL_IDLE_TIMEOUT,
...config?.poolSettings
},
ssl: {
enabled: postgresConfig.POSTGRES_SSL,
rejectUnauthorized: postgresConfig.POSTGRES_SSL_REJECT_UNAUTHORIZED,
...config?.ssl
},
timeouts: {
query: postgresConfig.POSTGRES_QUERY_TIMEOUT,
connection: postgresConfig.POSTGRES_CONNECTION_TIMEOUT,
statement: postgresConfig.POSTGRES_STATEMENT_TIMEOUT,
lock: postgresConfig.POSTGRES_LOCK_TIMEOUT,
idleInTransaction: postgresConfig.POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
...config?.timeouts
}
};
}
private buildPoolConfig(): any {
return {
host: this.config.host,
port: this.config.port,
database: this.config.database,
user: this.config.username,
password: this.config.password,
min: this.config.poolSettings?.min,
max: this.config.poolSettings?.max,
idleTimeoutMillis: this.config.poolSettings?.idleTimeoutMillis,
connectionTimeoutMillis: this.config.timeouts?.connection,
query_timeout: this.config.timeouts?.query,
statement_timeout: this.config.timeouts?.statement,
lock_timeout: this.config.timeouts?.lock,
idle_in_transaction_session_timeout: this.config.timeouts?.idleInTransaction,
ssl: this.config.ssl?.enabled ? {
rejectUnauthorized: this.config.ssl.rejectUnauthorized
} : false
};
}
private setupErrorHandlers(): void {
if (!this.pool) return;
this.pool.on('error', (error) => {
this.logger.error('PostgreSQL pool error:', error);
});
this.pool.on('connect', () => {
this.logger.debug('New PostgreSQL client connected');
});
this.pool.on('remove', () => {
this.logger.debug('PostgreSQL client removed from pool');
});
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}
import { QueryResult as PgQueryResult, Pool, PoolClient, QueryResultRow } from 'pg';
import { postgresConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import { PostgreSQLHealthMonitor } from './health';
import { PostgreSQLQueryBuilder } from './query-builder';
import { PostgreSQLTransactionManager } from './transactions';
import type {
PostgreSQLClientConfig,
PostgreSQLConnectionOptions,
QueryResult,
TransactionCallback,
} from './types';
/**
* PostgreSQL Client for Stock Bot
*
* Provides type-safe access to PostgreSQL with connection pooling,
* health monitoring, and transaction support.
*/
export class PostgreSQLClient {
private pool: Pool | null = null;
private readonly config: PostgreSQLClientConfig;
private readonly options: PostgreSQLConnectionOptions;
private readonly logger: ReturnType<typeof getLogger>;
private readonly healthMonitor: PostgreSQLHealthMonitor;
private readonly transactionManager: PostgreSQLTransactionManager;
private isConnected = false;
constructor(config?: Partial<PostgreSQLClientConfig>, options?: PostgreSQLConnectionOptions) {
this.config = this.buildConfig(config);
this.options = {
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options,
};
this.logger = getLogger('postgres-client');
this.healthMonitor = new PostgreSQLHealthMonitor(this);
this.transactionManager = new PostgreSQLTransactionManager(this);
}
/**
* Connect to PostgreSQL
*/
async connect(): Promise<void> {
if (this.isConnected && this.pool) {
return;
}
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(
`Connecting to PostgreSQL (attempt ${attempt}/${this.options.retryAttempts})...`
);
this.pool = new Pool(this.buildPoolConfig());
// Test the connection
const client = await this.pool.connect();
await client.query('SELECT 1');
client.release();
this.isConnected = true;
this.logger.info('Successfully connected to PostgreSQL');
// Start health monitoring
this.healthMonitor.start();
// Setup error handlers
this.setupErrorHandlers();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`PostgreSQL connection attempt ${attempt} failed:`, error);
if (this.pool) {
await this.pool.end();
this.pool = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(
`Failed to connect to PostgreSQL after ${this.options.retryAttempts} attempts: ${lastError?.message}`
);
}
/**
* Disconnect from PostgreSQL
*/
async disconnect(): Promise<void> {
if (!this.pool) {
return;
}
try {
this.healthMonitor.stop();
await this.pool.end();
this.isConnected = false;
this.pool = null;
this.logger.info('Disconnected from PostgreSQL');
} catch (error) {
this.logger.error('Error disconnecting from PostgreSQL:', error);
throw error;
}
}
/**
* Execute a query
*/
async query<T extends QueryResultRow = any>(
text: string,
params?: any[]
): Promise<QueryResult<T>> {
if (!this.pool) {
throw new Error('PostgreSQL client not connected');
}
const startTime = Date.now();
try {
const result = await this.pool.query<T>(text, params);
const executionTime = Date.now() - startTime;
this.logger.debug(`Query executed in ${executionTime}ms`, {
query: text.substring(0, 100),
params: params?.length,
});
return {
...result,
executionTime,
} as QueryResult<T>;
} catch (error) {
const executionTime = Date.now() - startTime;
this.logger.error(`Query failed after ${executionTime}ms:`, {
error,
query: text,
params,
});
throw error;
}
}
/**
* Execute multiple queries in a transaction
*/
async transaction<T>(callback: TransactionCallback<T>): Promise<T> {
return await this.transactionManager.execute(callback);
}
/**
* Get a query builder instance
*/
queryBuilder(): PostgreSQLQueryBuilder {
return new PostgreSQLQueryBuilder(this);
}
/**
* Create a new query builder with SELECT
*/
select(columns: string | string[] = '*'): PostgreSQLQueryBuilder {
return this.queryBuilder().select(columns);
}
/**
* Create a new query builder with INSERT
*/
insert(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().insert(table);
}
/**
* Create a new query builder with UPDATE
*/
update(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().update(table);
}
/**
* Create a new query builder with DELETE
*/
delete(table: string): PostgreSQLQueryBuilder {
return this.queryBuilder().delete(table);
}
/**
* Execute a stored procedure or function
*/
async callFunction<T extends QueryResultRow = any>(
functionName: string,
params?: any[]
): Promise<QueryResult<T>> {
const placeholders = params ? params.map((_, i) => `$${i + 1}`).join(', ') : '';
const query = `SELECT * FROM ${functionName}(${placeholders})`;
return await this.query<T>(query, params);
}
/**
* Check if a table exists
*/
async tableExists(tableName: string, schemaName: string = 'public'): Promise<boolean> {
const result = await this.query(
`SELECT EXISTS (
SELECT FROM information_schema.tables
WHERE table_schema = $1 AND table_name = $2
)`,
[schemaName, tableName]
);
return result.rows[0].exists;
}
/**
* Get table schema information
*/
async getTableSchema(tableName: string, schemaName: string = 'public'): Promise<any[]> {
const result = await this.query(
`SELECT
column_name,
data_type,
is_nullable,
column_default,
character_maximum_length
FROM information_schema.columns
WHERE table_schema = $1 AND table_name = $2
ORDER BY ordinal_position`,
[schemaName, tableName]
);
return result.rows;
}
/**
* Execute EXPLAIN for query analysis
*/
async explain(query: string, params?: any[]): Promise<any[]> {
const explainQuery = `EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON) ${query}`;
const result = await this.query(explainQuery, params);
return result.rows[0]['QUERY PLAN'];
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
const result = await this.query(`
SELECT
(SELECT count(*) FROM pg_stat_activity WHERE state = 'active') as active_connections,
(SELECT count(*) FROM pg_stat_activity WHERE state = 'idle') as idle_connections,
(SELECT setting FROM pg_settings WHERE name = 'max_connections') as max_connections,
pg_size_pretty(pg_database_size(current_database())) as database_size
`);
return result.rows[0];
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.pool;
}
/**
* Get the underlying connection pool
*/
get connectionPool(): Pool | null {
return this.pool;
}
private buildConfig(config?: Partial<PostgreSQLClientConfig>): PostgreSQLClientConfig {
return {
host: config?.host || postgresConfig.POSTGRES_HOST,
port: config?.port || postgresConfig.POSTGRES_PORT,
database: config?.database || postgresConfig.POSTGRES_DATABASE,
username: config?.username || postgresConfig.POSTGRES_USERNAME,
password: config?.password || postgresConfig.POSTGRES_PASSWORD,
poolSettings: {
min: postgresConfig.POSTGRES_POOL_MIN,
max: postgresConfig.POSTGRES_POOL_MAX,
idleTimeoutMillis: postgresConfig.POSTGRES_POOL_IDLE_TIMEOUT,
...config?.poolSettings,
},
ssl: {
enabled: postgresConfig.POSTGRES_SSL,
rejectUnauthorized: postgresConfig.POSTGRES_SSL_REJECT_UNAUTHORIZED,
...config?.ssl,
},
timeouts: {
query: postgresConfig.POSTGRES_QUERY_TIMEOUT,
connection: postgresConfig.POSTGRES_CONNECTION_TIMEOUT,
statement: postgresConfig.POSTGRES_STATEMENT_TIMEOUT,
lock: postgresConfig.POSTGRES_LOCK_TIMEOUT,
idleInTransaction: postgresConfig.POSTGRES_IDLE_IN_TRANSACTION_SESSION_TIMEOUT,
...config?.timeouts,
},
};
}
private buildPoolConfig(): any {
return {
host: this.config.host,
port: this.config.port,
database: this.config.database,
user: this.config.username,
password: this.config.password,
min: this.config.poolSettings?.min,
max: this.config.poolSettings?.max,
idleTimeoutMillis: this.config.poolSettings?.idleTimeoutMillis,
connectionTimeoutMillis: this.config.timeouts?.connection,
query_timeout: this.config.timeouts?.query,
statement_timeout: this.config.timeouts?.statement,
lock_timeout: this.config.timeouts?.lock,
idle_in_transaction_session_timeout: this.config.timeouts?.idleInTransaction,
ssl: this.config.ssl?.enabled
? {
rejectUnauthorized: this.config.ssl.rejectUnauthorized,
}
: false,
};
}
private setupErrorHandlers(): void {
if (!this.pool) return;
this.pool.on('error', error => {
this.logger.error('PostgreSQL pool error:', error);
});
this.pool.on('connect', () => {
this.logger.debug('New PostgreSQL client connected');
});
this.pool.on('remove', () => {
this.logger.debug('PostgreSQL client removed from pool');
});
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}

View file

@ -1,64 +1,64 @@
import { PostgreSQLClient } from './client';
import { postgresConfig } from '@stock-bot/config';
import type { PostgreSQLClientConfig, PostgreSQLConnectionOptions } from './types';
/**
* Factory function to create a PostgreSQL client instance
*/
export function createPostgreSQLClient(
config?: Partial<PostgreSQLClientConfig>,
options?: PostgreSQLConnectionOptions
): PostgreSQLClient {
return new PostgreSQLClient(config, options);
}
/**
* Create a PostgreSQL client with default configuration
*/
export function createDefaultPostgreSQLClient(): PostgreSQLClient {
const config: Partial<PostgreSQLClientConfig> = {
host: postgresConfig.POSTGRES_HOST,
port: postgresConfig.POSTGRES_PORT,
database: postgresConfig.POSTGRES_DATABASE,
username: postgresConfig.POSTGRES_USERNAME,
password: postgresConfig.POSTGRES_PASSWORD
};
return new PostgreSQLClient(config);
}
/**
* Singleton PostgreSQL client instance
*/
let defaultClient: PostgreSQLClient | null = null;
/**
* Get or create the default PostgreSQL client instance
*/
export function getPostgreSQLClient(): PostgreSQLClient {
if (!defaultClient) {
defaultClient = createDefaultPostgreSQLClient();
}
return defaultClient;
}
/**
* Connect to PostgreSQL using the default client
*/
export async function connectPostgreSQL(): Promise<PostgreSQLClient> {
const client = getPostgreSQLClient();
if (!client.connected) {
await client.connect();
}
return client;
}
/**
* Disconnect from PostgreSQL
*/
export async function disconnectPostgreSQL(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}
import { postgresConfig } from '@stock-bot/config';
import { PostgreSQLClient } from './client';
import type { PostgreSQLClientConfig, PostgreSQLConnectionOptions } from './types';
/**
* Factory function to create a PostgreSQL client instance
*/
export function createPostgreSQLClient(
config?: Partial<PostgreSQLClientConfig>,
options?: PostgreSQLConnectionOptions
): PostgreSQLClient {
return new PostgreSQLClient(config, options);
}
/**
* Create a PostgreSQL client with default configuration
*/
export function createDefaultPostgreSQLClient(): PostgreSQLClient {
const config: Partial<PostgreSQLClientConfig> = {
host: postgresConfig.POSTGRES_HOST,
port: postgresConfig.POSTGRES_PORT,
database: postgresConfig.POSTGRES_DATABASE,
username: postgresConfig.POSTGRES_USERNAME,
password: postgresConfig.POSTGRES_PASSWORD,
};
return new PostgreSQLClient(config);
}
/**
* Singleton PostgreSQL client instance
*/
let defaultClient: PostgreSQLClient | null = null;
/**
* Get or create the default PostgreSQL client instance
*/
export function getPostgreSQLClient(): PostgreSQLClient {
if (!defaultClient) {
defaultClient = createDefaultPostgreSQLClient();
}
return defaultClient;
}
/**
* Connect to PostgreSQL using the default client
*/
export async function connectPostgreSQL(): Promise<PostgreSQLClient> {
const client = getPostgreSQLClient();
if (!client.connected) {
await client.connect();
}
return client;
}
/**
* Disconnect from PostgreSQL
*/
export async function disconnectPostgreSQL(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}

View file

@ -1,142 +1,142 @@
import { getLogger } from '@stock-bot/logger';
import type { PostgreSQLClient } from './client';
import type { PostgreSQLHealthCheck, PostgreSQLHealthStatus, PostgreSQLMetrics } from './types';
/**
* PostgreSQL Health Monitor
*
* Monitors PostgreSQL connection health and provides metrics
*/
export class PostgreSQLHealthMonitor {
private readonly client: PostgreSQLClient;
private readonly logger: ReturnType<typeof getLogger>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private metrics: PostgreSQLMetrics;
private lastHealthCheck: PostgreSQLHealthCheck | null = null;
constructor(client: PostgreSQLClient) {
this.client = client;
this.logger = getLogger('postgres-health-monitor');
this.metrics = {
queriesPerSecond: 0,
averageQueryTime: 0,
errorRate: 0,
connectionPoolUtilization: 0,
slowQueries: 0
};
}
/**
* Start health monitoring
*/
start(intervalMs: number = 30000): void {
if (this.healthCheckInterval) {
this.stop();
}
this.logger.info(`Starting PostgreSQL health monitoring (interval: ${intervalMs}ms)`);
this.healthCheckInterval = setInterval(async () => {
try {
await this.performHealthCheck();
} catch (error) {
this.logger.error('Health check failed:', error);
}
}, intervalMs);
// Perform initial health check
this.performHealthCheck().catch(error => {
this.logger.error('Initial health check failed:', error);
});
}
/**
* Stop health monitoring
*/
stop(): void {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
this.logger.info('Stopped PostgreSQL health monitoring');
}
}
/**
* Get current health status
*/
async getHealth(): Promise<PostgreSQLHealthCheck> {
if (!this.lastHealthCheck) {
await this.performHealthCheck();
}
return this.lastHealthCheck!;
}
/**
* Get current metrics
*/
getMetrics(): PostgreSQLMetrics {
return { ...this.metrics };
}
/**
* Perform a health check
*/
private async performHealthCheck(): Promise<void> {
const startTime = Date.now();
const errors: string[] = [];
let status: PostgreSQLHealthStatus = 'healthy';
try {
if (!this.client.connected) {
errors.push('PostgreSQL client not connected');
status = 'unhealthy';
} else {
// Test basic connectivity
await this.client.query('SELECT 1');
// Get connection stats
const stats = await this.client.getStats();
// Check connection pool utilization
const utilization = parseInt(stats.active_connections) / parseInt(stats.max_connections);
if (utilization > 0.8) {
errors.push('High connection pool utilization');
status = status === 'healthy' ? 'degraded' : status;
}
// Check for high latency
const latency = Date.now() - startTime;
if (latency > 1000) {
errors.push(`High latency: ${latency}ms`);
status = status === 'healthy' ? 'degraded' : status;
}
this.metrics.connectionPoolUtilization = utilization;
}
} catch (error) {
errors.push(`Health check failed: ${(error as Error).message}`);
status = 'unhealthy';
}
const latency = Date.now() - startTime;
this.lastHealthCheck = {
status,
timestamp: new Date(),
latency,
connections: {
active: 1,
idle: 9,
total: 10
},
errors: errors.length > 0 ? errors : undefined
};
// Log health status changes
if (status !== 'healthy') {
this.logger.warn(`PostgreSQL health status: ${status}`, { errors, latency });
} else {
this.logger.debug(`PostgreSQL health check passed (${latency}ms)`);
}
}
}
import { getLogger } from '@stock-bot/logger';
import type { PostgreSQLClient } from './client';
import type { PostgreSQLHealthCheck, PostgreSQLHealthStatus, PostgreSQLMetrics } from './types';
/**
* PostgreSQL Health Monitor
*
* Monitors PostgreSQL connection health and provides metrics
*/
export class PostgreSQLHealthMonitor {
private readonly client: PostgreSQLClient;
private readonly logger: ReturnType<typeof getLogger>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private metrics: PostgreSQLMetrics;
private lastHealthCheck: PostgreSQLHealthCheck | null = null;
constructor(client: PostgreSQLClient) {
this.client = client;
this.logger = getLogger('postgres-health-monitor');
this.metrics = {
queriesPerSecond: 0,
averageQueryTime: 0,
errorRate: 0,
connectionPoolUtilization: 0,
slowQueries: 0,
};
}
/**
* Start health monitoring
*/
start(intervalMs: number = 30000): void {
if (this.healthCheckInterval) {
this.stop();
}
this.logger.info(`Starting PostgreSQL health monitoring (interval: ${intervalMs}ms)`);
this.healthCheckInterval = setInterval(async () => {
try {
await this.performHealthCheck();
} catch (error) {
this.logger.error('Health check failed:', error);
}
}, intervalMs);
// Perform initial health check
this.performHealthCheck().catch(error => {
this.logger.error('Initial health check failed:', error);
});
}
/**
* Stop health monitoring
*/
stop(): void {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
this.logger.info('Stopped PostgreSQL health monitoring');
}
}
/**
* Get current health status
*/
async getHealth(): Promise<PostgreSQLHealthCheck> {
if (!this.lastHealthCheck) {
await this.performHealthCheck();
}
return this.lastHealthCheck!;
}
/**
* Get current metrics
*/
getMetrics(): PostgreSQLMetrics {
return { ...this.metrics };
}
/**
* Perform a health check
*/
private async performHealthCheck(): Promise<void> {
const startTime = Date.now();
const errors: string[] = [];
let status: PostgreSQLHealthStatus = 'healthy';
try {
if (!this.client.connected) {
errors.push('PostgreSQL client not connected');
status = 'unhealthy';
} else {
// Test basic connectivity
await this.client.query('SELECT 1');
// Get connection stats
const stats = await this.client.getStats();
// Check connection pool utilization
const utilization = parseInt(stats.active_connections) / parseInt(stats.max_connections);
if (utilization > 0.8) {
errors.push('High connection pool utilization');
status = status === 'healthy' ? 'degraded' : status;
}
// Check for high latency
const latency = Date.now() - startTime;
if (latency > 1000) {
errors.push(`High latency: ${latency}ms`);
status = status === 'healthy' ? 'degraded' : status;
}
this.metrics.connectionPoolUtilization = utilization;
}
} catch (error) {
errors.push(`Health check failed: ${(error as Error).message}`);
status = 'unhealthy';
}
const latency = Date.now() - startTime;
this.lastHealthCheck = {
status,
timestamp: new Date(),
latency,
connections: {
active: 1,
idle: 9,
total: 10,
},
errors: errors.length > 0 ? errors : undefined,
};
// Log health status changes
if (status !== 'healthy') {
this.logger.warn(`PostgreSQL health status: ${status}`, { errors, latency });
} else {
this.logger.debug(`PostgreSQL health check passed (${latency}ms)`);
}
}
}

View file

@ -1,34 +1,34 @@
/**
* PostgreSQL Client Library for Stock Bot
*
* Provides type-safe PostgreSQL access for operational data,
* transactions, and relational queries.
*/
export { PostgreSQLClient } from './client';
export { PostgreSQLHealthMonitor } from './health';
export { PostgreSQLTransactionManager } from './transactions';
export { PostgreSQLQueryBuilder } from './query-builder';
// export { PostgreSQLMigrationManager } from './migrations'; // TODO: Implement migrations
// Types
export type {
PostgreSQLClientConfig,
PostgreSQLConnectionOptions,
PostgreSQLHealthStatus,
PostgreSQLMetrics,
QueryResult,
TransactionCallback,
SchemaNames,
TableNames,
Trade,
Order,
Position,
Portfolio,
Strategy,
RiskLimit,
AuditLog
} from './types';
// Utils
export { createPostgreSQLClient, getPostgreSQLClient } from './factory';
/**
* PostgreSQL Client Library for Stock Bot
*
* Provides type-safe PostgreSQL access for operational data,
* transactions, and relational queries.
*/
export { PostgreSQLClient } from './client';
export { PostgreSQLHealthMonitor } from './health';
export { PostgreSQLTransactionManager } from './transactions';
export { PostgreSQLQueryBuilder } from './query-builder';
// export { PostgreSQLMigrationManager } from './migrations'; // TODO: Implement migrations
// Types
export type {
PostgreSQLClientConfig,
PostgreSQLConnectionOptions,
PostgreSQLHealthStatus,
PostgreSQLMetrics,
QueryResult,
TransactionCallback,
SchemaNames,
TableNames,
Trade,
Order,
Position,
Portfolio,
Strategy,
RiskLimit,
AuditLog,
} from './types';
// Utils
export { createPostgreSQLClient, getPostgreSQLClient } from './factory';

View file

@ -1,268 +1,270 @@
import type { QueryResultRow } from 'pg';
import type { PostgreSQLClient } from './client';
import type { WhereCondition, JoinCondition, OrderByCondition, QueryResult } from './types';
/**
* PostgreSQL Query Builder
*
* Provides a fluent interface for building SQL queries
*/
export class PostgreSQLQueryBuilder {
private queryType: 'SELECT' | 'INSERT' | 'UPDATE' | 'DELETE' | null = null;
private selectColumns: string[] = [];
private fromTable: string = '';
private joins: JoinCondition[] = [];
private whereConditions: WhereCondition[] = [];
private groupByColumns: string[] = [];
private havingConditions: WhereCondition[] = [];
private orderByConditions: OrderByCondition[] = [];
private limitCount: number | null = null;
private offsetCount: number | null = null;
private insertValues: Record<string, any> = {};
private updateValues: Record<string, any> = {};
private readonly client: PostgreSQLClient;
constructor(client: PostgreSQLClient) {
this.client = client;
}
/**
* SELECT statement
*/
select(columns: string | string[] = '*'): this {
this.queryType = 'SELECT';
this.selectColumns = Array.isArray(columns) ? columns : [columns];
return this;
}
/**
* FROM clause
*/
from(table: string): this {
this.fromTable = table;
return this;
}
/**
* JOIN clause
*/
join(table: string, on: string, type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL' = 'INNER'): this {
this.joins.push({ type, table, on });
return this;
}
/**
* WHERE clause
*/
where(column: string, operator: string, value?: any): this {
this.whereConditions.push({ column, operator: operator as any, value });
return this;
}
/**
* GROUP BY clause
*/
groupBy(columns: string | string[]): this {
this.groupByColumns = Array.isArray(columns) ? columns : [columns];
return this;
}
/**
* ORDER BY clause
*/
orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): this {
this.orderByConditions.push({ column, direction });
return this;
}
/**
* LIMIT clause
*/
limit(count: number): this {
this.limitCount = count;
return this;
}
/**
* OFFSET clause
*/
offset(count: number): this {
this.offsetCount = count;
return this;
}
/**
* INSERT statement
*/
insert(table: string): this {
this.queryType = 'INSERT';
this.fromTable = table;
return this;
}
/**
* VALUES for INSERT
*/
values(data: Record<string, any>): this {
this.insertValues = data;
return this;
}
/**
* UPDATE statement
*/
update(table: string): this {
this.queryType = 'UPDATE';
this.fromTable = table;
return this;
}
/**
* SET for UPDATE
*/
set(data: Record<string, any>): this {
this.updateValues = data;
return this;
}
/**
* DELETE statement
*/
delete(table: string): this {
this.queryType = 'DELETE';
this.fromTable = table;
return this;
}
/**
* Build and execute the query
*/
async execute<T extends QueryResultRow = any>(): Promise<QueryResult<T>> {
const { sql, params } = this.build();
return await this.client.query<T>(sql, params);
}
/**
* Build the SQL query
*/
build(): { sql: string; params: any[] } {
const params: any[] = [];
let sql = '';
switch (this.queryType) {
case 'SELECT':
sql = this.buildSelectQuery(params);
break;
case 'INSERT':
sql = this.buildInsertQuery(params);
break;
case 'UPDATE':
sql = this.buildUpdateQuery(params);
break;
case 'DELETE':
sql = this.buildDeleteQuery(params);
break;
default:
throw new Error('Query type not specified');
}
return { sql, params };
}
private buildSelectQuery(params: any[]): string {
let sql = `SELECT ${this.selectColumns.join(', ')}`;
if (this.fromTable) {
sql += ` FROM ${this.fromTable}`;
}
// Add JOINs
for (const join of this.joins) {
sql += ` ${join.type} JOIN ${join.table} ON ${join.on}`;
}
// Add WHERE
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
// Add GROUP BY
if (this.groupByColumns.length > 0) {
sql += ` GROUP BY ${this.groupByColumns.join(', ')}`;
}
// Add HAVING
if (this.havingConditions.length > 0) {
sql += ' HAVING ' + this.buildWhereClause(this.havingConditions, params);
}
// Add ORDER BY
if (this.orderByConditions.length > 0) {
const orderBy = this.orderByConditions
.map(order => `${order.column} ${order.direction}`)
.join(', ');
sql += ` ORDER BY ${orderBy}`;
}
// Add LIMIT
if (this.limitCount !== null) {
sql += ` LIMIT $${params.length + 1}`;
params.push(this.limitCount);
}
// Add OFFSET
if (this.offsetCount !== null) {
sql += ` OFFSET $${params.length + 1}`;
params.push(this.offsetCount);
}
return sql;
}
private buildInsertQuery(params: any[]): string {
const columns = Object.keys(this.insertValues);
const placeholders = columns.map((_, i) => `$${params.length + i + 1}`);
params.push(...Object.values(this.insertValues));
return `INSERT INTO ${this.fromTable} (${columns.join(', ')}) VALUES (${placeholders.join(', ')})`;
}
private buildUpdateQuery(params: any[]): string {
const sets = Object.keys(this.updateValues).map((key, i) => {
return `${key} = $${params.length + i + 1}`;
});
params.push(...Object.values(this.updateValues));
let sql = `UPDATE ${this.fromTable} SET ${sets.join(', ')}`;
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
return sql;
}
private buildDeleteQuery(params: any[]): string {
let sql = `DELETE FROM ${this.fromTable}`;
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
return sql;
}
private buildWhereClause(conditions: WhereCondition[], params: any[]): string {
return conditions.map(condition => {
if (condition.operator === 'IS NULL' || condition.operator === 'IS NOT NULL') {
return `${condition.column} ${condition.operator}`;
} else {
params.push(condition.value);
return `${condition.column} ${condition.operator} $${params.length}`;
}
}).join(' AND ');
}
}
import type { QueryResultRow } from 'pg';
import type { PostgreSQLClient } from './client';
import type { JoinCondition, OrderByCondition, QueryResult, WhereCondition } from './types';
/**
* PostgreSQL Query Builder
*
* Provides a fluent interface for building SQL queries
*/
export class PostgreSQLQueryBuilder {
private queryType: 'SELECT' | 'INSERT' | 'UPDATE' | 'DELETE' | null = null;
private selectColumns: string[] = [];
private fromTable: string = '';
private joins: JoinCondition[] = [];
private whereConditions: WhereCondition[] = [];
private groupByColumns: string[] = [];
private havingConditions: WhereCondition[] = [];
private orderByConditions: OrderByCondition[] = [];
private limitCount: number | null = null;
private offsetCount: number | null = null;
private insertValues: Record<string, any> = {};
private updateValues: Record<string, any> = {};
private readonly client: PostgreSQLClient;
constructor(client: PostgreSQLClient) {
this.client = client;
}
/**
* SELECT statement
*/
select(columns: string | string[] = '*'): this {
this.queryType = 'SELECT';
this.selectColumns = Array.isArray(columns) ? columns : [columns];
return this;
}
/**
* FROM clause
*/
from(table: string): this {
this.fromTable = table;
return this;
}
/**
* JOIN clause
*/
join(table: string, on: string, type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL' = 'INNER'): this {
this.joins.push({ type, table, on });
return this;
}
/**
* WHERE clause
*/
where(column: string, operator: string, value?: any): this {
this.whereConditions.push({ column, operator: operator as any, value });
return this;
}
/**
* GROUP BY clause
*/
groupBy(columns: string | string[]): this {
this.groupByColumns = Array.isArray(columns) ? columns : [columns];
return this;
}
/**
* ORDER BY clause
*/
orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): this {
this.orderByConditions.push({ column, direction });
return this;
}
/**
* LIMIT clause
*/
limit(count: number): this {
this.limitCount = count;
return this;
}
/**
* OFFSET clause
*/
offset(count: number): this {
this.offsetCount = count;
return this;
}
/**
* INSERT statement
*/
insert(table: string): this {
this.queryType = 'INSERT';
this.fromTable = table;
return this;
}
/**
* VALUES for INSERT
*/
values(data: Record<string, any>): this {
this.insertValues = data;
return this;
}
/**
* UPDATE statement
*/
update(table: string): this {
this.queryType = 'UPDATE';
this.fromTable = table;
return this;
}
/**
* SET for UPDATE
*/
set(data: Record<string, any>): this {
this.updateValues = data;
return this;
}
/**
* DELETE statement
*/
delete(table: string): this {
this.queryType = 'DELETE';
this.fromTable = table;
return this;
}
/**
* Build and execute the query
*/
async execute<T extends QueryResultRow = any>(): Promise<QueryResult<T>> {
const { sql, params } = this.build();
return await this.client.query<T>(sql, params);
}
/**
* Build the SQL query
*/
build(): { sql: string; params: any[] } {
const params: any[] = [];
let sql = '';
switch (this.queryType) {
case 'SELECT':
sql = this.buildSelectQuery(params);
break;
case 'INSERT':
sql = this.buildInsertQuery(params);
break;
case 'UPDATE':
sql = this.buildUpdateQuery(params);
break;
case 'DELETE':
sql = this.buildDeleteQuery(params);
break;
default:
throw new Error('Query type not specified');
}
return { sql, params };
}
private buildSelectQuery(params: any[]): string {
let sql = `SELECT ${this.selectColumns.join(', ')}`;
if (this.fromTable) {
sql += ` FROM ${this.fromTable}`;
}
// Add JOINs
for (const join of this.joins) {
sql += ` ${join.type} JOIN ${join.table} ON ${join.on}`;
}
// Add WHERE
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
// Add GROUP BY
if (this.groupByColumns.length > 0) {
sql += ` GROUP BY ${this.groupByColumns.join(', ')}`;
}
// Add HAVING
if (this.havingConditions.length > 0) {
sql += ' HAVING ' + this.buildWhereClause(this.havingConditions, params);
}
// Add ORDER BY
if (this.orderByConditions.length > 0) {
const orderBy = this.orderByConditions
.map(order => `${order.column} ${order.direction}`)
.join(', ');
sql += ` ORDER BY ${orderBy}`;
}
// Add LIMIT
if (this.limitCount !== null) {
sql += ` LIMIT $${params.length + 1}`;
params.push(this.limitCount);
}
// Add OFFSET
if (this.offsetCount !== null) {
sql += ` OFFSET $${params.length + 1}`;
params.push(this.offsetCount);
}
return sql;
}
private buildInsertQuery(params: any[]): string {
const columns = Object.keys(this.insertValues);
const placeholders = columns.map((_, i) => `$${params.length + i + 1}`);
params.push(...Object.values(this.insertValues));
return `INSERT INTO ${this.fromTable} (${columns.join(', ')}) VALUES (${placeholders.join(', ')})`;
}
private buildUpdateQuery(params: any[]): string {
const sets = Object.keys(this.updateValues).map((key, i) => {
return `${key} = $${params.length + i + 1}`;
});
params.push(...Object.values(this.updateValues));
let sql = `UPDATE ${this.fromTable} SET ${sets.join(', ')}`;
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
return sql;
}
private buildDeleteQuery(params: any[]): string {
let sql = `DELETE FROM ${this.fromTable}`;
if (this.whereConditions.length > 0) {
sql += ' WHERE ' + this.buildWhereClause(this.whereConditions, params);
}
return sql;
}
private buildWhereClause(conditions: WhereCondition[], params: any[]): string {
return conditions
.map(condition => {
if (condition.operator === 'IS NULL' || condition.operator === 'IS NOT NULL') {
return `${condition.column} ${condition.operator}`;
} else {
params.push(condition.value);
return `${condition.column} ${condition.operator} $${params.length}`;
}
})
.join(' AND ');
}
}

View file

@ -1,57 +1,56 @@
import { PoolClient } from 'pg';
import { getLogger } from '@stock-bot/logger';
import type { PostgreSQLClient } from './client';
import type { TransactionCallback } from './types';
/**
* PostgreSQL Transaction Manager
*
* Provides transaction support for multi-statement operations
*/
export class PostgreSQLTransactionManager {
private readonly client: PostgreSQLClient;
private readonly logger: ReturnType<typeof getLogger>;
constructor(client: PostgreSQLClient) {
this.client = client;
this.logger = getLogger('postgres-transaction-manager');
}
/**
* Execute operations within a transaction
*/
async execute<T>(callback: TransactionCallback<T>): Promise<T> {
const pool = this.client.connectionPool;
if (!pool) {
throw new Error('PostgreSQL client not connected');
}
const client = await pool.connect();
try {
this.logger.debug('Starting PostgreSQL transaction');
await client.query('BEGIN');
const result = await callback(client);
await client.query('COMMIT');
this.logger.debug('PostgreSQL transaction committed successfully');
return result;
} catch (error) {
this.logger.error('PostgreSQL transaction failed, rolling back:', error);
try {
await client.query('ROLLBACK');
} catch (rollbackError) {
this.logger.error('Failed to rollback transaction:', rollbackError);
}
throw error;
} finally {
client.release();
}
}
}
import { PoolClient } from 'pg';
import { getLogger } from '@stock-bot/logger';
import type { PostgreSQLClient } from './client';
import type { TransactionCallback } from './types';
/**
* PostgreSQL Transaction Manager
*
* Provides transaction support for multi-statement operations
*/
export class PostgreSQLTransactionManager {
private readonly client: PostgreSQLClient;
private readonly logger: ReturnType<typeof getLogger>;
constructor(client: PostgreSQLClient) {
this.client = client;
this.logger = getLogger('postgres-transaction-manager');
}
/**
* Execute operations within a transaction
*/
async execute<T>(callback: TransactionCallback<T>): Promise<T> {
const pool = this.client.connectionPool;
if (!pool) {
throw new Error('PostgreSQL client not connected');
}
const client = await pool.connect();
try {
this.logger.debug('Starting PostgreSQL transaction');
await client.query('BEGIN');
const result = await callback(client);
await client.query('COMMIT');
this.logger.debug('PostgreSQL transaction committed successfully');
return result;
} catch (error) {
this.logger.error('PostgreSQL transaction failed, rolling back:', error);
try {
await client.query('ROLLBACK');
} catch (rollbackError) {
this.logger.error('Failed to rollback transaction:', rollbackError);
}
throw error;
} finally {
client.release();
}
}
}

View file

@ -1,206 +1,218 @@
import type { Pool, PoolClient, QueryResult as PgQueryResult, QueryResultRow } from 'pg';
/**
* PostgreSQL Client Configuration
*/
export interface PostgreSQLClientConfig {
host: string;
port: number;
database: string;
username: string;
password: string;
poolSettings?: {
min: number;
max: number;
idleTimeoutMillis: number;
};
ssl?: {
enabled: boolean;
rejectUnauthorized: boolean;
};
timeouts?: {
query: number;
connection: number;
statement: number;
lock: number;
idleInTransaction: number;
};
}
/**
* PostgreSQL Connection Options
*/
export interface PostgreSQLConnectionOptions {
retryAttempts?: number;
retryDelay?: number;
healthCheckInterval?: number;
}
/**
* Health Status Types
*/
export type PostgreSQLHealthStatus = 'healthy' | 'degraded' | 'unhealthy';
export interface PostgreSQLHealthCheck {
status: PostgreSQLHealthStatus;
timestamp: Date;
latency: number;
connections: {
active: number;
idle: number;
total: number;
};
errors?: string[];
}
export interface PostgreSQLMetrics {
queriesPerSecond: number;
averageQueryTime: number;
errorRate: number;
connectionPoolUtilization: number;
slowQueries: number;
}
/**
* Query Result Types
*/
export interface QueryResult<T extends QueryResultRow = any> extends PgQueryResult<T> {
executionTime?: number;
}
export type TransactionCallback<T> = (client: PoolClient) => Promise<T>;
/**
* Schema and Table Names
*/
export type SchemaNames = 'trading' | 'strategy' | 'risk' | 'audit';
export type TableNames =
| 'trades'
| 'orders'
| 'positions'
| 'portfolios'
| 'strategies'
| 'risk_limits'
| 'audit_logs'
| 'users'
| 'accounts'
| 'symbols'
| 'exchanges';
/**
* Trading Domain Types
*/
export interface Trade {
id: string;
order_id: string;
symbol: string;
side: 'buy' | 'sell';
quantity: number;
price: number;
executed_at: Date;
commission: number;
fees: number;
portfolio_id: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
}
export interface Order {
id: string;
symbol: string;
side: 'buy' | 'sell';
type: 'market' | 'limit' | 'stop' | 'stop_limit';
quantity: number;
price?: number;
stop_price?: number;
status: 'pending' | 'filled' | 'cancelled' | 'rejected';
portfolio_id: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
expires_at?: Date;
}
export interface Position {
id: string;
symbol: string;
quantity: number;
average_cost: number;
market_value: number;
unrealized_pnl: number;
realized_pnl: number;
portfolio_id: string;
created_at: Date;
updated_at: Date;
}
export interface Portfolio {
id: string;
name: string;
cash_balance: number;
total_value: number;
unrealized_pnl: number;
realized_pnl: number;
user_id: string;
created_at: Date;
updated_at: Date;
}
export interface Strategy {
id: string;
name: string;
description: string;
parameters: Record<string, any>;
status: 'active' | 'inactive' | 'paused';
performance_metrics: Record<string, number>;
portfolio_id: string;
created_at: Date;
updated_at: Date;
}
export interface RiskLimit {
id: string;
type: 'position_size' | 'daily_loss' | 'max_drawdown' | 'concentration';
value: number;
threshold: number;
status: 'active' | 'breached' | 'disabled';
portfolio_id?: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
}
export interface AuditLog {
id: string;
action: string;
entity_type: string;
entity_id: string;
old_values?: Record<string, any>;
new_values?: Record<string, any>;
user_id?: string;
ip_address?: string;
user_agent?: string;
timestamp: Date;
}
/**
* Query Builder Types
*/
export interface WhereCondition {
column: string;
operator: '=' | '!=' | '>' | '<' | '>=' | '<=' | 'IN' | 'NOT IN' | 'LIKE' | 'ILIKE' | 'IS NULL' | 'IS NOT NULL';
value?: any;
}
export interface JoinCondition {
type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL';
table: string;
on: string;
}
export interface OrderByCondition {
column: string;
direction: 'ASC' | 'DESC';
}
import type { QueryResult as PgQueryResult, Pool, PoolClient, QueryResultRow } from 'pg';
/**
* PostgreSQL Client Configuration
*/
export interface PostgreSQLClientConfig {
host: string;
port: number;
database: string;
username: string;
password: string;
poolSettings?: {
min: number;
max: number;
idleTimeoutMillis: number;
};
ssl?: {
enabled: boolean;
rejectUnauthorized: boolean;
};
timeouts?: {
query: number;
connection: number;
statement: number;
lock: number;
idleInTransaction: number;
};
}
/**
* PostgreSQL Connection Options
*/
export interface PostgreSQLConnectionOptions {
retryAttempts?: number;
retryDelay?: number;
healthCheckInterval?: number;
}
/**
* Health Status Types
*/
export type PostgreSQLHealthStatus = 'healthy' | 'degraded' | 'unhealthy';
export interface PostgreSQLHealthCheck {
status: PostgreSQLHealthStatus;
timestamp: Date;
latency: number;
connections: {
active: number;
idle: number;
total: number;
};
errors?: string[];
}
export interface PostgreSQLMetrics {
queriesPerSecond: number;
averageQueryTime: number;
errorRate: number;
connectionPoolUtilization: number;
slowQueries: number;
}
/**
* Query Result Types
*/
export interface QueryResult<T extends QueryResultRow = any> extends PgQueryResult<T> {
executionTime?: number;
}
export type TransactionCallback<T> = (client: PoolClient) => Promise<T>;
/**
* Schema and Table Names
*/
export type SchemaNames = 'trading' | 'strategy' | 'risk' | 'audit';
export type TableNames =
| 'trades'
| 'orders'
| 'positions'
| 'portfolios'
| 'strategies'
| 'risk_limits'
| 'audit_logs'
| 'users'
| 'accounts'
| 'symbols'
| 'exchanges';
/**
* Trading Domain Types
*/
export interface Trade {
id: string;
order_id: string;
symbol: string;
side: 'buy' | 'sell';
quantity: number;
price: number;
executed_at: Date;
commission: number;
fees: number;
portfolio_id: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
}
export interface Order {
id: string;
symbol: string;
side: 'buy' | 'sell';
type: 'market' | 'limit' | 'stop' | 'stop_limit';
quantity: number;
price?: number;
stop_price?: number;
status: 'pending' | 'filled' | 'cancelled' | 'rejected';
portfolio_id: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
expires_at?: Date;
}
export interface Position {
id: string;
symbol: string;
quantity: number;
average_cost: number;
market_value: number;
unrealized_pnl: number;
realized_pnl: number;
portfolio_id: string;
created_at: Date;
updated_at: Date;
}
export interface Portfolio {
id: string;
name: string;
cash_balance: number;
total_value: number;
unrealized_pnl: number;
realized_pnl: number;
user_id: string;
created_at: Date;
updated_at: Date;
}
export interface Strategy {
id: string;
name: string;
description: string;
parameters: Record<string, any>;
status: 'active' | 'inactive' | 'paused';
performance_metrics: Record<string, number>;
portfolio_id: string;
created_at: Date;
updated_at: Date;
}
export interface RiskLimit {
id: string;
type: 'position_size' | 'daily_loss' | 'max_drawdown' | 'concentration';
value: number;
threshold: number;
status: 'active' | 'breached' | 'disabled';
portfolio_id?: string;
strategy_id?: string;
created_at: Date;
updated_at: Date;
}
export interface AuditLog {
id: string;
action: string;
entity_type: string;
entity_id: string;
old_values?: Record<string, any>;
new_values?: Record<string, any>;
user_id?: string;
ip_address?: string;
user_agent?: string;
timestamp: Date;
}
/**
* Query Builder Types
*/
export interface WhereCondition {
column: string;
operator:
| '='
| '!='
| '>'
| '<'
| '>='
| '<='
| 'IN'
| 'NOT IN'
| 'LIKE'
| 'ILIKE'
| 'IS NULL'
| 'IS NOT NULL';
value?: any;
}
export interface JoinCondition {
type: 'INNER' | 'LEFT' | 'RIGHT' | 'FULL';
table: string;
on: string;
}
export interface OrderByCondition {
column: string;
direction: 'ASC' | 'DESC';
}

View file

@ -1,471 +1,476 @@
import { Pool } from 'pg';
import { questdbConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import type {
QuestDBClientConfig,
QuestDBConnectionOptions,
QueryResult,
InsertResult,
BaseTimeSeriesData,
TableNames
} from './types';
import { QuestDBHealthMonitor } from './health';
import { QuestDBQueryBuilder } from './query-builder';
import { QuestDBInfluxWriter } from './influx-writer';
import { QuestDBSchemaManager } from './schema';
/**
* QuestDB Client for Stock Bot
*
* Provides high-performance time-series data access with support for
* multiple protocols (HTTP, PostgreSQL, InfluxDB Line Protocol).
*/
export class QuestDBClient {
private pgPool: Pool | null = null;
private readonly config: QuestDBClientConfig;
private readonly options: QuestDBConnectionOptions;
private readonly logger = getLogger('QuestDBClient');
private readonly healthMonitor: QuestDBHealthMonitor;
private readonly influxWriter: QuestDBInfluxWriter;
private readonly schemaManager: QuestDBSchemaManager;
private isConnected = false;
constructor(
config?: Partial<QuestDBClientConfig>,
options?: QuestDBConnectionOptions
) {
this.config = this.buildConfig(config);
this.options = {
protocol: 'pg',
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options
};
this.healthMonitor = new QuestDBHealthMonitor(this);
this.influxWriter = new QuestDBInfluxWriter(this);
this.schemaManager = new QuestDBSchemaManager(this);
}
/**
* Connect to QuestDB
*/
async connect(): Promise<void> {
if (this.isConnected) {
return;
}
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(`Connecting to QuestDB (attempt ${attempt}/${this.options.retryAttempts})...`);
// Connect via PostgreSQL wire protocol
this.pgPool = new Pool(this.buildPgPoolConfig());
// Test the connection
const client = await this.pgPool.connect();
await client.query('SELECT 1');
client.release();
this.isConnected = true;
this.logger.info('Successfully connected to QuestDB');
// Initialize schema
await this.schemaManager.initializeDatabase();
// Start health monitoring
this.healthMonitor.startMonitoring();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`QuestDB connection attempt ${attempt} failed:`, error);
if (this.pgPool) {
await this.pgPool.end();
this.pgPool = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(`Failed to connect to QuestDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`);
}
/**
* Disconnect from QuestDB
*/
async disconnect(): Promise<void> {
if (!this.isConnected) {
return;
} try {
this.healthMonitor.stopMonitoring();
if (this.pgPool) {
await this.pgPool.end();
this.pgPool = null;
}
this.isConnected = false;
this.logger.info('Disconnected from QuestDB');
} catch (error) {
this.logger.error('Error disconnecting from QuestDB:', error);
throw error;
}
}
/**
* Execute a SQL query
*/
async query<T = any>(sql: string, params?: any[]): Promise<QueryResult<T>> {
if (!this.pgPool) {
throw new Error('QuestDB client not connected');
}
const startTime = Date.now();
try {
const result = await this.pgPool.query(sql, params);
const executionTime = Date.now() - startTime;
this.logger.debug(`Query executed in ${executionTime}ms`, {
query: sql.substring(0, 100),
rowCount: result.rowCount
});
return {
rows: result.rows,
rowCount: result.rowCount || 0,
executionTime, metadata: {
columns: result.fields?.map((field: any) => ({
name: field.name,
type: this.mapDataType(field.dataTypeID)
})) || []
}
};
} catch (error) {
const executionTime = Date.now() - startTime;
this.logger.error(`Query failed after ${executionTime}ms:`, {
error: (error as Error).message,
query: sql,
params
});
throw error;
}
}
/**
* Write OHLCV data using InfluxDB Line Protocol
*/
async writeOHLCV(
symbol: string,
exchange: string,
data: Array<{
timestamp: Date;
open: number;
high: number;
low: number;
close: number;
volume: number;
}>
): Promise<void> {
return await this.influxWriter.writeOHLCV(symbol, exchange, data);
}
/**
* Write market analytics data
*/
async writeMarketAnalytics(
symbol: string,
exchange: string,
analytics: {
timestamp: Date;
rsi?: number;
macd?: number;
signal?: number;
histogram?: number;
bollinger_upper?: number;
bollinger_lower?: number;
volume_sma?: number;
}
): Promise<void> {
return await this.influxWriter.writeMarketAnalytics(symbol, exchange, analytics);
}
/**
* Get a query builder instance
*/
queryBuilder(): QuestDBQueryBuilder {
return new QuestDBQueryBuilder(this);
}
/**
* Create a SELECT query builder
*/
select(...columns: string[]): QuestDBQueryBuilder {
return this.queryBuilder().select(...columns);
}
/**
* Create an aggregation query builder
*/
aggregate(table: TableNames): QuestDBQueryBuilder {
return this.queryBuilder().from(table);
}
/**
* Execute a time-series specific query with SAMPLE BY
*/
async sampleBy<T = any>(
table: TableNames,
columns: string[],
interval: string,
timeColumn: string = 'timestamp',
where?: string,
params?: any[]
): Promise<QueryResult<T>> {
const columnsStr = columns.join(', ');
const whereClause = where ? `WHERE ${where}` : '';
const sql = `
SELECT ${columnsStr}
FROM ${table}
${whereClause}
SAMPLE BY ${interval}
ALIGN TO CALENDAR
`;
return await this.query<T>(sql, params);
}
/**
* Get latest values by symbol using LATEST BY
*/
async latestBy<T = any>(
table: TableNames,
columns: string | string[] = '*',
keyColumns: string | string[] = 'symbol'
): Promise<QueryResult<T>> {
const columnsStr = Array.isArray(columns) ? columns.join(', ') : columns;
const keyColumnsStr = Array.isArray(keyColumns) ? keyColumns.join(', ') : keyColumns;
const sql = `
SELECT ${columnsStr}
FROM ${table}
LATEST BY ${keyColumnsStr}
`;
return await this.query<T>(sql);
}
/**
* Execute ASOF JOIN for time-series correlation
*/
async asofJoin<T = any>(
leftTable: TableNames,
rightTable: TableNames,
joinCondition: string,
columns?: string[],
where?: string,
params?: any[]
): Promise<QueryResult<T>> {
const columnsStr = columns ? columns.join(', ') : '*';
const whereClause = where ? `WHERE ${where}` : '';
const sql = `
SELECT ${columnsStr}
FROM ${leftTable}
ASOF JOIN ${rightTable} ON ${joinCondition}
${whereClause}
`;
return await this.query<T>(sql, params);
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
const result = await this.query(`
SELECT
table_name,
row_count,
partition_count,
size_bytes
FROM tables()
WHERE table_name NOT LIKE 'sys.%'
ORDER BY row_count DESC
`);
return result.rows;
}
/**
* Get table information
*/
async getTableInfo(tableName: string): Promise<any> {
const result = await this.query(
`SELECT * FROM table_columns WHERE table_name = ?`,
[tableName]
);
return result.rows;
}
/**
* Check if PostgreSQL pool is healthy
*/
isPgPoolHealthy(): boolean {
return this.pgPool !== null && !this.pgPool.ended;
}
/**
* Get HTTP endpoint URL
*/
getHttpUrl(): string {
const protocol = this.config.tls?.enabled ? 'https' : 'http';
return `${protocol}://${this.config.host}:${this.config.httpPort}`;
}
/**
* Get InfluxDB endpoint URL
*/
getInfluxUrl(): string {
const protocol = this.config.tls?.enabled ? 'https' : 'http';
return `${protocol}://${this.config.host}:${this.config.influxPort}`;
}
/**
* Get health monitor instance
*/
getHealthMonitor(): QuestDBHealthMonitor {
return this.healthMonitor;
}
/**
* Get schema manager instance
*/
getSchemaManager(): QuestDBSchemaManager {
return this.schemaManager;
}
/**
* Get InfluxDB writer instance
*/
getInfluxWriter(): QuestDBInfluxWriter {
return this.influxWriter;
}
/**
* Optimize table by rebuilding partitions
*/
async optimizeTable(tableName: string): Promise<void> {
await this.query(`VACUUM TABLE ${tableName}`);
this.logger.info(`Optimized table: ${tableName}`);
}
/**
* Create a table with time-series optimizations
*/
async createTable(
tableName: string,
columns: string,
partitionBy: string = 'DAY',
timestampColumn: string = 'timestamp'
): Promise<void> {
const sql = `
CREATE TABLE IF NOT EXISTS ${tableName} (
${columns}
) TIMESTAMP(${timestampColumn}) PARTITION BY ${partitionBy}
`;
await this.query(sql);
this.logger.info(`Created table: ${tableName}`);
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.pgPool;
}
/**
* Get the PostgreSQL connection pool
*/
get connectionPool(): Pool | null {
return this.pgPool;
}
/**
* Get configuration
*/
get configuration(): QuestDBClientConfig {
return { ...this.config };
}
private buildConfig(config?: Partial<QuestDBClientConfig>): QuestDBClientConfig {
return {
host: config?.host || questdbConfig.QUESTDB_HOST,
httpPort: config?.httpPort || questdbConfig.QUESTDB_HTTP_PORT,
pgPort: config?.pgPort || questdbConfig.QUESTDB_PG_PORT,
influxPort: config?.influxPort || questdbConfig.QUESTDB_INFLUX_PORT,
user: config?.user || questdbConfig.QUESTDB_USER,
password: config?.password || questdbConfig.QUESTDB_PASSWORD,
database: config?.database || questdbConfig.QUESTDB_DEFAULT_DATABASE,
tls: {
enabled: questdbConfig.QUESTDB_TLS_ENABLED,
verifyServerCert: questdbConfig.QUESTDB_TLS_VERIFY_SERVER_CERT,
...config?.tls
},
timeouts: {
connection: questdbConfig.QUESTDB_CONNECTION_TIMEOUT,
request: questdbConfig.QUESTDB_REQUEST_TIMEOUT,
...config?.timeouts
},
retryAttempts: questdbConfig.QUESTDB_RETRY_ATTEMPTS,
...config
};
}
private buildPgPoolConfig(): any {
return {
host: this.config.host,
port: this.config.pgPort,
database: this.config.database,
user: this.config.user,
password: this.config.password,
connectionTimeoutMillis: this.config.timeouts?.connection,
query_timeout: this.config.timeouts?.request,
ssl: this.config.tls?.enabled ? {
rejectUnauthorized: this.config.tls.verifyServerCert
} : false,
min: 2,
max: 10
};
}
private mapDataType(typeId: number): string {
// Map PostgreSQL type IDs to QuestDB types
const typeMap: Record<number, string> = {
16: 'BOOLEAN',
20: 'LONG',
21: 'INT',
23: 'INT',
25: 'STRING',
700: 'FLOAT',
701: 'DOUBLE',
1043: 'STRING',
1082: 'DATE',
1114: 'TIMESTAMP',
1184: 'TIMESTAMP'
};
return typeMap[typeId] || 'STRING';
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}
import { Pool } from 'pg';
import { questdbConfig } from '@stock-bot/config';
import { getLogger } from '@stock-bot/logger';
import { QuestDBHealthMonitor } from './health';
import { QuestDBInfluxWriter } from './influx-writer';
import { QuestDBQueryBuilder } from './query-builder';
import { QuestDBSchemaManager } from './schema';
import type {
BaseTimeSeriesData,
InsertResult,
QueryResult,
QuestDBClientConfig,
QuestDBConnectionOptions,
TableNames,
} from './types';
/**
* QuestDB Client for Stock Bot
*
* Provides high-performance time-series data access with support for
* multiple protocols (HTTP, PostgreSQL, InfluxDB Line Protocol).
*/
export class QuestDBClient {
private pgPool: Pool | null = null;
private readonly config: QuestDBClientConfig;
private readonly options: QuestDBConnectionOptions;
private readonly logger = getLogger('QuestDBClient');
private readonly healthMonitor: QuestDBHealthMonitor;
private readonly influxWriter: QuestDBInfluxWriter;
private readonly schemaManager: QuestDBSchemaManager;
private isConnected = false;
constructor(config?: Partial<QuestDBClientConfig>, options?: QuestDBConnectionOptions) {
this.config = this.buildConfig(config);
this.options = {
protocol: 'pg',
retryAttempts: 3,
retryDelay: 1000,
healthCheckInterval: 30000,
...options,
};
this.healthMonitor = new QuestDBHealthMonitor(this);
this.influxWriter = new QuestDBInfluxWriter(this);
this.schemaManager = new QuestDBSchemaManager(this);
}
/**
* Connect to QuestDB
*/
async connect(): Promise<void> {
if (this.isConnected) {
return;
}
let lastError: Error | null = null;
for (let attempt = 1; attempt <= this.options.retryAttempts!; attempt++) {
try {
this.logger.info(
`Connecting to QuestDB (attempt ${attempt}/${this.options.retryAttempts})...`
);
// Connect via PostgreSQL wire protocol
this.pgPool = new Pool(this.buildPgPoolConfig());
// Test the connection
const client = await this.pgPool.connect();
await client.query('SELECT 1');
client.release();
this.isConnected = true;
this.logger.info('Successfully connected to QuestDB');
// Initialize schema
await this.schemaManager.initializeDatabase();
// Start health monitoring
this.healthMonitor.startMonitoring();
return;
} catch (error) {
lastError = error as Error;
this.logger.error(`QuestDB connection attempt ${attempt} failed:`, error);
if (this.pgPool) {
await this.pgPool.end();
this.pgPool = null;
}
if (attempt < this.options.retryAttempts!) {
await this.delay(this.options.retryDelay! * attempt);
}
}
}
throw new Error(
`Failed to connect to QuestDB after ${this.options.retryAttempts} attempts: ${lastError?.message}`
);
}
/**
* Disconnect from QuestDB
*/
async disconnect(): Promise<void> {
if (!this.isConnected) {
return;
}
try {
this.healthMonitor.stopMonitoring();
if (this.pgPool) {
await this.pgPool.end();
this.pgPool = null;
}
this.isConnected = false;
this.logger.info('Disconnected from QuestDB');
} catch (error) {
this.logger.error('Error disconnecting from QuestDB:', error);
throw error;
}
}
/**
* Execute a SQL query
*/
async query<T = any>(sql: string, params?: any[]): Promise<QueryResult<T>> {
if (!this.pgPool) {
throw new Error('QuestDB client not connected');
}
const startTime = Date.now();
try {
const result = await this.pgPool.query(sql, params);
const executionTime = Date.now() - startTime;
this.logger.debug(`Query executed in ${executionTime}ms`, {
query: sql.substring(0, 100),
rowCount: result.rowCount,
});
return {
rows: result.rows,
rowCount: result.rowCount || 0,
executionTime,
metadata: {
columns:
result.fields?.map((field: any) => ({
name: field.name,
type: this.mapDataType(field.dataTypeID),
})) || [],
},
};
} catch (error) {
const executionTime = Date.now() - startTime;
this.logger.error(`Query failed after ${executionTime}ms:`, {
error: (error as Error).message,
query: sql,
params,
});
throw error;
}
}
/**
* Write OHLCV data using InfluxDB Line Protocol
*/
async writeOHLCV(
symbol: string,
exchange: string,
data: Array<{
timestamp: Date;
open: number;
high: number;
low: number;
close: number;
volume: number;
}>
): Promise<void> {
return await this.influxWriter.writeOHLCV(symbol, exchange, data);
}
/**
* Write market analytics data
*/
async writeMarketAnalytics(
symbol: string,
exchange: string,
analytics: {
timestamp: Date;
rsi?: number;
macd?: number;
signal?: number;
histogram?: number;
bollinger_upper?: number;
bollinger_lower?: number;
volume_sma?: number;
}
): Promise<void> {
return await this.influxWriter.writeMarketAnalytics(symbol, exchange, analytics);
}
/**
* Get a query builder instance
*/
queryBuilder(): QuestDBQueryBuilder {
return new QuestDBQueryBuilder(this);
}
/**
* Create a SELECT query builder
*/
select(...columns: string[]): QuestDBQueryBuilder {
return this.queryBuilder().select(...columns);
}
/**
* Create an aggregation query builder
*/
aggregate(table: TableNames): QuestDBQueryBuilder {
return this.queryBuilder().from(table);
}
/**
* Execute a time-series specific query with SAMPLE BY
*/
async sampleBy<T = any>(
table: TableNames,
columns: string[],
interval: string,
timeColumn: string = 'timestamp',
where?: string,
params?: any[]
): Promise<QueryResult<T>> {
const columnsStr = columns.join(', ');
const whereClause = where ? `WHERE ${where}` : '';
const sql = `
SELECT ${columnsStr}
FROM ${table}
${whereClause}
SAMPLE BY ${interval}
ALIGN TO CALENDAR
`;
return await this.query<T>(sql, params);
}
/**
* Get latest values by symbol using LATEST BY
*/
async latestBy<T = any>(
table: TableNames,
columns: string | string[] = '*',
keyColumns: string | string[] = 'symbol'
): Promise<QueryResult<T>> {
const columnsStr = Array.isArray(columns) ? columns.join(', ') : columns;
const keyColumnsStr = Array.isArray(keyColumns) ? keyColumns.join(', ') : keyColumns;
const sql = `
SELECT ${columnsStr}
FROM ${table}
LATEST BY ${keyColumnsStr}
`;
return await this.query<T>(sql);
}
/**
* Execute ASOF JOIN for time-series correlation
*/
async asofJoin<T = any>(
leftTable: TableNames,
rightTable: TableNames,
joinCondition: string,
columns?: string[],
where?: string,
params?: any[]
): Promise<QueryResult<T>> {
const columnsStr = columns ? columns.join(', ') : '*';
const whereClause = where ? `WHERE ${where}` : '';
const sql = `
SELECT ${columnsStr}
FROM ${leftTable}
ASOF JOIN ${rightTable} ON ${joinCondition}
${whereClause}
`;
return await this.query<T>(sql, params);
}
/**
* Get database statistics
*/
async getStats(): Promise<any> {
const result = await this.query(`
SELECT
table_name,
row_count,
partition_count,
size_bytes
FROM tables()
WHERE table_name NOT LIKE 'sys.%'
ORDER BY row_count DESC
`);
return result.rows;
}
/**
* Get table information
*/
async getTableInfo(tableName: string): Promise<any> {
const result = await this.query(`SELECT * FROM table_columns WHERE table_name = ?`, [
tableName,
]);
return result.rows;
}
/**
* Check if PostgreSQL pool is healthy
*/
isPgPoolHealthy(): boolean {
return this.pgPool !== null && !this.pgPool.ended;
}
/**
* Get HTTP endpoint URL
*/
getHttpUrl(): string {
const protocol = this.config.tls?.enabled ? 'https' : 'http';
return `${protocol}://${this.config.host}:${this.config.httpPort}`;
}
/**
* Get InfluxDB endpoint URL
*/
getInfluxUrl(): string {
const protocol = this.config.tls?.enabled ? 'https' : 'http';
return `${protocol}://${this.config.host}:${this.config.influxPort}`;
}
/**
* Get health monitor instance
*/
getHealthMonitor(): QuestDBHealthMonitor {
return this.healthMonitor;
}
/**
* Get schema manager instance
*/
getSchemaManager(): QuestDBSchemaManager {
return this.schemaManager;
}
/**
* Get InfluxDB writer instance
*/
getInfluxWriter(): QuestDBInfluxWriter {
return this.influxWriter;
}
/**
* Optimize table by rebuilding partitions
*/
async optimizeTable(tableName: string): Promise<void> {
await this.query(`VACUUM TABLE ${tableName}`);
this.logger.info(`Optimized table: ${tableName}`);
}
/**
* Create a table with time-series optimizations
*/
async createTable(
tableName: string,
columns: string,
partitionBy: string = 'DAY',
timestampColumn: string = 'timestamp'
): Promise<void> {
const sql = `
CREATE TABLE IF NOT EXISTS ${tableName} (
${columns}
) TIMESTAMP(${timestampColumn}) PARTITION BY ${partitionBy}
`;
await this.query(sql);
this.logger.info(`Created table: ${tableName}`);
}
/**
* Check if client is connected
*/
get connected(): boolean {
return this.isConnected && !!this.pgPool;
}
/**
* Get the PostgreSQL connection pool
*/
get connectionPool(): Pool | null {
return this.pgPool;
}
/**
* Get configuration
*/
get configuration(): QuestDBClientConfig {
return { ...this.config };
}
private buildConfig(config?: Partial<QuestDBClientConfig>): QuestDBClientConfig {
return {
host: config?.host || questdbConfig.QUESTDB_HOST,
httpPort: config?.httpPort || questdbConfig.QUESTDB_HTTP_PORT,
pgPort: config?.pgPort || questdbConfig.QUESTDB_PG_PORT,
influxPort: config?.influxPort || questdbConfig.QUESTDB_INFLUX_PORT,
user: config?.user || questdbConfig.QUESTDB_USER,
password: config?.password || questdbConfig.QUESTDB_PASSWORD,
database: config?.database || questdbConfig.QUESTDB_DEFAULT_DATABASE,
tls: {
enabled: questdbConfig.QUESTDB_TLS_ENABLED,
verifyServerCert: questdbConfig.QUESTDB_TLS_VERIFY_SERVER_CERT,
...config?.tls,
},
timeouts: {
connection: questdbConfig.QUESTDB_CONNECTION_TIMEOUT,
request: questdbConfig.QUESTDB_REQUEST_TIMEOUT,
...config?.timeouts,
},
retryAttempts: questdbConfig.QUESTDB_RETRY_ATTEMPTS,
...config,
};
}
private buildPgPoolConfig(): any {
return {
host: this.config.host,
port: this.config.pgPort,
database: this.config.database,
user: this.config.user,
password: this.config.password,
connectionTimeoutMillis: this.config.timeouts?.connection,
query_timeout: this.config.timeouts?.request,
ssl: this.config.tls?.enabled
? {
rejectUnauthorized: this.config.tls.verifyServerCert,
}
: false,
min: 2,
max: 10,
};
}
private mapDataType(typeId: number): string {
// Map PostgreSQL type IDs to QuestDB types
const typeMap: Record<number, string> = {
16: 'BOOLEAN',
20: 'LONG',
21: 'INT',
23: 'INT',
25: 'STRING',
700: 'FLOAT',
701: 'DOUBLE',
1043: 'STRING',
1082: 'DATE',
1114: 'TIMESTAMP',
1184: 'TIMESTAMP',
};
return typeMap[typeId] || 'STRING';
}
private delay(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
}

View file

@ -1,63 +1,63 @@
import { QuestDBClient } from './client';
import { questdbConfig } from '@stock-bot/config';
import type { QuestDBClientConfig, QuestDBConnectionOptions } from './types';
/**
* Factory function to create a QuestDB client instance
*/
export function createQuestDBClient(
config?: Partial<QuestDBClientConfig>,
options?: QuestDBConnectionOptions
): QuestDBClient {
return new QuestDBClient(config, options);
}
/**
* Create a QuestDB client with default configuration
*/
export function createDefaultQuestDBClient(): QuestDBClient {
const config: Partial<QuestDBClientConfig> = {
host: questdbConfig.QUESTDB_HOST,
httpPort: questdbConfig.QUESTDB_HTTP_PORT,
pgPort: questdbConfig.QUESTDB_PG_PORT,
influxPort: questdbConfig.QUESTDB_INFLUX_PORT,
user: questdbConfig.QUESTDB_USER,
password: questdbConfig.QUESTDB_PASSWORD
};
return new QuestDBClient(config);
}
/**
* Singleton QuestDB client instance
*/
let defaultClient: QuestDBClient | null = null;
/**
* Get or create the default QuestDB client instance
*/
export function getQuestDBClient(): QuestDBClient {
if (!defaultClient) {
defaultClient = createDefaultQuestDBClient();
}
return defaultClient;
}
/**
* Connect to QuestDB using the default client
*/
export async function connectQuestDB(): Promise<QuestDBClient> {
const client = getQuestDBClient();
await client.connect();
return client;
}
/**
* Disconnect from QuestDB
*/
export async function disconnectQuestDB(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}
import { questdbConfig } from '@stock-bot/config';
import { QuestDBClient } from './client';
import type { QuestDBClientConfig, QuestDBConnectionOptions } from './types';
/**
* Factory function to create a QuestDB client instance
*/
export function createQuestDBClient(
config?: Partial<QuestDBClientConfig>,
options?: QuestDBConnectionOptions
): QuestDBClient {
return new QuestDBClient(config, options);
}
/**
* Create a QuestDB client with default configuration
*/
export function createDefaultQuestDBClient(): QuestDBClient {
const config: Partial<QuestDBClientConfig> = {
host: questdbConfig.QUESTDB_HOST,
httpPort: questdbConfig.QUESTDB_HTTP_PORT,
pgPort: questdbConfig.QUESTDB_PG_PORT,
influxPort: questdbConfig.QUESTDB_INFLUX_PORT,
user: questdbConfig.QUESTDB_USER,
password: questdbConfig.QUESTDB_PASSWORD,
};
return new QuestDBClient(config);
}
/**
* Singleton QuestDB client instance
*/
let defaultClient: QuestDBClient | null = null;
/**
* Get or create the default QuestDB client instance
*/
export function getQuestDBClient(): QuestDBClient {
if (!defaultClient) {
defaultClient = createDefaultQuestDBClient();
}
return defaultClient;
}
/**
* Connect to QuestDB using the default client
*/
export async function connectQuestDB(): Promise<QuestDBClient> {
const client = getQuestDBClient();
await client.connect();
return client;
}
/**
* Disconnect from QuestDB
*/
export async function disconnectQuestDB(): Promise<void> {
if (defaultClient) {
await defaultClient.disconnect();
defaultClient = null;
}
}

View file

@ -1,233 +1,234 @@
import { getLogger } from '@stock-bot/logger';
import type { HealthStatus, PerformanceMetrics, QueryResult } from './types';
// Interface to avoid circular dependency
interface QuestDBClientInterface {
query<T = any>(sql: string, params?: any[]): Promise<QueryResult<T>>;
isPgPoolHealthy(): boolean;
}
/**
* QuestDB Health Monitor
*
* Monitors connection health, performance metrics, and provides
* automatic recovery capabilities for the QuestDB client.
*/
export class QuestDBHealthMonitor {
private readonly logger: ReturnType<typeof getLogger>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private lastHealthCheck: Date | null = null;
private performanceMetrics: PerformanceMetrics = {
totalQueries: 0,
successfulQueries: 0,
failedQueries: 0,
averageResponseTime: 0,
lastQueryTime: null,
connectionUptime: 0,
memoryUsage: 0
};
constructor(private readonly client: QuestDBClientInterface) {
this.logger = getLogger('questdb-health-monitor');
}
/**
* Start health monitoring
*/
public startMonitoring(intervalMs: number = 30000): void {
if (this.healthCheckInterval) {
this.stopMonitoring();
}
this.logger.info(`Starting health monitoring with ${intervalMs}ms interval`);
this.healthCheckInterval = setInterval(async () => {
try {
await this.performHealthCheck();
} catch (error) {
this.logger.error('Health check failed', error);
}
}, intervalMs);
// Perform initial health check
this.performHealthCheck().catch(error => {
this.logger.error('Initial health check failed', error);
});
}
/**
* Stop health monitoring
*/
public stopMonitoring(): void {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
this.logger.info('Health monitoring stopped');
}
}
/**
* Perform a health check
*/
public async performHealthCheck(): Promise<HealthStatus> {
const startTime = Date.now();
try {
// Test basic connectivity with a simple query
await this.client.query('SELECT 1 as health_check');
const responseTime = Date.now() - startTime;
this.lastHealthCheck = new Date();
const status: HealthStatus = {
isHealthy: true,
lastCheck: this.lastHealthCheck,
responseTime,
message: 'Connection healthy',
details: {
pgPool: this.client.isPgPoolHealthy(),
httpEndpoint: true, // Will be implemented when HTTP client is added
uptime: this.getUptime()
}
};
this.logger.debug('Health check passed', { responseTime });
return status;
} catch (error) {
const responseTime = Date.now() - startTime;
this.lastHealthCheck = new Date();
const status: HealthStatus = {
isHealthy: false,
lastCheck: this.lastHealthCheck,
responseTime,
message: `Health check failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
error: error instanceof Error ? error : new Error('Unknown error'),
details: {
pgPool: false,
httpEndpoint: false,
uptime: this.getUptime()
}
};
this.logger.error('Health check failed', { error, responseTime });
return status;
}
}
/**
* Get current health status
*/
public async getHealthStatus(): Promise<HealthStatus> {
if (!this.lastHealthCheck || Date.now() - this.lastHealthCheck.getTime() > 60000) {
return await this.performHealthCheck();
}
// Return cached status if recent
return {
isHealthy: true,
lastCheck: this.lastHealthCheck,
responseTime: 0,
message: 'Using cached health status',
details: {
pgPool: this.client.isPgPoolHealthy(),
httpEndpoint: true,
uptime: this.getUptime()
}
};
}
/**
* Record query performance metrics
*/
public recordQuery(success: boolean, responseTime: number): void {
this.performanceMetrics.totalQueries++;
this.performanceMetrics.lastQueryTime = new Date();
if (success) {
this.performanceMetrics.successfulQueries++;
} else {
this.performanceMetrics.failedQueries++;
}
// Update rolling average response time
const totalResponseTime = this.performanceMetrics.averageResponseTime *
(this.performanceMetrics.totalQueries - 1) + responseTime;
this.performanceMetrics.averageResponseTime =
totalResponseTime / this.performanceMetrics.totalQueries;
// Update memory usage
this.performanceMetrics.memoryUsage = process.memoryUsage().heapUsed;
}
/**
* Get performance metrics
*/
public getPerformanceMetrics(): PerformanceMetrics {
return { ...this.performanceMetrics };
}
/**
* Get connection uptime in seconds
*/
private getUptime(): number {
return Math.floor(process.uptime());
}
/**
* Reset performance metrics
*/
public resetMetrics(): void {
this.performanceMetrics = {
totalQueries: 0,
successfulQueries: 0,
failedQueries: 0,
averageResponseTime: 0,
lastQueryTime: null,
connectionUptime: this.getUptime(),
memoryUsage: process.memoryUsage().heapUsed
};
this.logger.info('Performance metrics reset');
}
/**
* Get health summary for monitoring dashboards
*/
public async getHealthSummary(): Promise<{
status: HealthStatus;
metrics: PerformanceMetrics;
recommendations: string[];
}> {
const status = await this.getHealthStatus();
const metrics = this.getPerformanceMetrics();
const recommendations: string[] = [];
// Generate recommendations based on metrics
if (metrics.failedQueries > metrics.successfulQueries * 0.1) {
recommendations.push('High error rate detected - check query patterns');
}
if (metrics.averageResponseTime > 1000) {
recommendations.push('High response times - consider query optimization');
}
if (metrics.memoryUsage > 100 * 1024 * 1024) { // 100MB
recommendations.push('High memory usage - monitor for memory leaks');
}
return {
status,
metrics,
recommendations
};
}
/**
* Cleanup resources
*/
public destroy(): void {
this.stopMonitoring();
this.logger.info('Health monitor destroyed');
}
}
import { getLogger } from '@stock-bot/logger';
import type { HealthStatus, PerformanceMetrics, QueryResult } from './types';
// Interface to avoid circular dependency
interface QuestDBClientInterface {
query<T = any>(sql: string, params?: any[]): Promise<QueryResult<T>>;
isPgPoolHealthy(): boolean;
}
/**
* QuestDB Health Monitor
*
* Monitors connection health, performance metrics, and provides
* automatic recovery capabilities for the QuestDB client.
*/
export class QuestDBHealthMonitor {
private readonly logger: ReturnType<typeof getLogger>;
private healthCheckInterval: NodeJS.Timeout | null = null;
private lastHealthCheck: Date | null = null;
private performanceMetrics: PerformanceMetrics = {
totalQueries: 0,
successfulQueries: 0,
failedQueries: 0,
averageResponseTime: 0,
lastQueryTime: null,
connectionUptime: 0,
memoryUsage: 0,
};
constructor(private readonly client: QuestDBClientInterface) {
this.logger = getLogger('questdb-health-monitor');
}
/**
* Start health monitoring
*/
public startMonitoring(intervalMs: number = 30000): void {
if (this.healthCheckInterval) {
this.stopMonitoring();
}
this.logger.info(`Starting health monitoring with ${intervalMs}ms interval`);
this.healthCheckInterval = setInterval(async () => {
try {
await this.performHealthCheck();
} catch (error) {
this.logger.error('Health check failed', error);
}
}, intervalMs);
// Perform initial health check
this.performHealthCheck().catch(error => {
this.logger.error('Initial health check failed', error);
});
}
/**
* Stop health monitoring
*/
public stopMonitoring(): void {
if (this.healthCheckInterval) {
clearInterval(this.healthCheckInterval);
this.healthCheckInterval = null;
this.logger.info('Health monitoring stopped');
}
}
/**
* Perform a health check
*/
public async performHealthCheck(): Promise<HealthStatus> {
const startTime = Date.now();
try {
// Test basic connectivity with a simple query
await this.client.query('SELECT 1 as health_check');
const responseTime = Date.now() - startTime;
this.lastHealthCheck = new Date();
const status: HealthStatus = {
isHealthy: true,
lastCheck: this.lastHealthCheck,
responseTime,
message: 'Connection healthy',
details: {
pgPool: this.client.isPgPoolHealthy(),
httpEndpoint: true, // Will be implemented when HTTP client is added
uptime: this.getUptime(),
},
};
this.logger.debug('Health check passed', { responseTime });
return status;
} catch (error) {
const responseTime = Date.now() - startTime;
this.lastHealthCheck = new Date();
const status: HealthStatus = {
isHealthy: false,
lastCheck: this.lastHealthCheck,
responseTime,
message: `Health check failed: ${error instanceof Error ? error.message : 'Unknown error'}`,
error: error instanceof Error ? error : new Error('Unknown error'),
details: {
pgPool: false,
httpEndpoint: false,
uptime: this.getUptime(),
},
};
this.logger.error('Health check failed', { error, responseTime });
return status;
}
}
/**
* Get current health status
*/
public async getHealthStatus(): Promise<HealthStatus> {
if (!this.lastHealthCheck || Date.now() - this.lastHealthCheck.getTime() > 60000) {
return await this.performHealthCheck();
}
// Return cached status if recent
return {
isHealthy: true,
lastCheck: this.lastHealthCheck,
responseTime: 0,
message: 'Using cached health status',
details: {
pgPool: this.client.isPgPoolHealthy(),
httpEndpoint: true,
uptime: this.getUptime(),
},
};
}
/**
* Record query performance metrics
*/
public recordQuery(success: boolean, responseTime: number): void {
this.performanceMetrics.totalQueries++;
this.performanceMetrics.lastQueryTime = new Date();
if (success) {
this.performanceMetrics.successfulQueries++;
} else {
this.performanceMetrics.failedQueries++;
}
// Update rolling average response time
const totalResponseTime =
this.performanceMetrics.averageResponseTime * (this.performanceMetrics.totalQueries - 1) +
responseTime;
this.performanceMetrics.averageResponseTime =
totalResponseTime / this.performanceMetrics.totalQueries;
// Update memory usage
this.performanceMetrics.memoryUsage = process.memoryUsage().heapUsed;
}
/**
* Get performance metrics
*/
public getPerformanceMetrics(): PerformanceMetrics {
return { ...this.performanceMetrics };
}
/**
* Get connection uptime in seconds
*/
private getUptime(): number {
return Math.floor(process.uptime());
}
/**
* Reset performance metrics
*/
public resetMetrics(): void {
this.performanceMetrics = {
totalQueries: 0,
successfulQueries: 0,
failedQueries: 0,
averageResponseTime: 0,
lastQueryTime: null,
connectionUptime: this.getUptime(),
memoryUsage: process.memoryUsage().heapUsed,
};
this.logger.info('Performance metrics reset');
}
/**
* Get health summary for monitoring dashboards
*/
public async getHealthSummary(): Promise<{
status: HealthStatus;
metrics: PerformanceMetrics;
recommendations: string[];
}> {
const status = await this.getHealthStatus();
const metrics = this.getPerformanceMetrics();
const recommendations: string[] = [];
// Generate recommendations based on metrics
if (metrics.failedQueries > metrics.successfulQueries * 0.1) {
recommendations.push('High error rate detected - check query patterns');
}
if (metrics.averageResponseTime > 1000) {
recommendations.push('High response times - consider query optimization');
}
if (metrics.memoryUsage > 100 * 1024 * 1024) {
// 100MB
recommendations.push('High memory usage - monitor for memory leaks');
}
return {
status,
metrics,
recommendations,
};
}
/**
* Cleanup resources
*/
public destroy(): void {
this.stopMonitoring();
this.logger.info('Health monitor destroyed');
}
}

View file

@ -1,32 +1,32 @@
/**
* QuestDB Client Library for Stock Bot
*
* Provides high-performance time-series data access with support for
* InfluxDB Line Protocol, SQL queries, and PostgreSQL wire protocol.
*/
export { QuestDBClient } from './client';
export { QuestDBHealthMonitor } from './health';
export { QuestDBQueryBuilder } from './query-builder';
export { QuestDBInfluxWriter } from './influx-writer';
export { QuestDBSchemaManager } from './schema';
// Types
export type {
QuestDBClientConfig,
QuestDBConnectionOptions,
QuestDBHealthStatus,
QuestDBMetrics,
TableNames,
OHLCVData,
TradeData,
QuoteData,
IndicatorData,
PerformanceData,
RiskMetrics,
QueryResult,
InsertResult
} from './types';
// Utils
export { createQuestDBClient, getQuestDBClient } from './factory';
/**
* QuestDB Client Library for Stock Bot
*
* Provides high-performance time-series data access with support for
* InfluxDB Line Protocol, SQL queries, and PostgreSQL wire protocol.
*/
export { QuestDBClient } from './client';
export { QuestDBHealthMonitor } from './health';
export { QuestDBQueryBuilder } from './query-builder';
export { QuestDBInfluxWriter } from './influx-writer';
export { QuestDBSchemaManager } from './schema';
// Types
export type {
QuestDBClientConfig,
QuestDBConnectionOptions,
QuestDBHealthStatus,
QuestDBMetrics,
TableNames,
OHLCVData,
TradeData,
QuoteData,
IndicatorData,
PerformanceData,
RiskMetrics,
QueryResult,
InsertResult,
} from './types';
// Utils
export { createQuestDBClient, getQuestDBClient } from './factory';

View file

@ -1,436 +1,430 @@
import { getLogger } from '@stock-bot/logger';
import type {
InfluxLineData,
InfluxWriteOptions,
BaseTimeSeriesData
} from './types';
// Interface to avoid circular dependency
interface QuestDBClientInterface {
getHttpUrl(): string;
}
/**
* QuestDB InfluxDB Line Protocol Writer
*
* Provides high-performance data ingestion using InfluxDB Line Protocol
* which QuestDB supports natively for optimal time-series data insertion.
*/
export class QuestDBInfluxWriter {
private readonly logger: ReturnType<typeof getLogger>;
private writeBuffer: string[] = [];
private flushTimer: NodeJS.Timeout | null = null;
private readonly defaultOptions: Required<InfluxWriteOptions> = {
batchSize: 1000,
flushInterval: 5000,
autoFlush: true,
precision: 'ms',
retryAttempts: 3,
retryDelay: 1000
};
constructor(private readonly client: QuestDBClientInterface) {
this.logger = getLogger('questdb-influx-writer');
}
/**
* Write single data point using InfluxDB Line Protocol
*/
public async writePoint(
measurement: string,
tags: Record<string, string>,
fields: Record<string, number | string | boolean>,
timestamp?: Date,
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const line = this.buildLineProtocol(measurement, tags, fields, timestamp);
const opts = { ...this.defaultOptions, ...options };
if (opts.autoFlush && this.writeBuffer.length === 0) {
// Single point write - send immediately
await this.sendLines([line], opts);
} else {
// Add to buffer
this.writeBuffer.push(line);
if (opts.autoFlush) {
this.scheduleFlush(opts);
}
// Flush if buffer is full
if (this.writeBuffer.length >= opts.batchSize) {
await this.flush(opts);
}
}
}
/**
* Write multiple data points
*/
public async writePoints(
data: InfluxLineData[],
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const opts = { ...this.defaultOptions, ...options };
const lines = data.map(point =>
this.buildLineProtocol(point.measurement, point.tags, point.fields, point.timestamp)
);
if (opts.autoFlush) {
// Send immediately for batch writes
await this.sendLines(lines, opts);
} else {
// Add to buffer
this.writeBuffer.push(...lines);
// Flush if buffer exceeds batch size
while (this.writeBuffer.length >= opts.batchSize) {
const batch = this.writeBuffer.splice(0, opts.batchSize);
await this.sendLines(batch, opts);
}
}
}
/**
* Write OHLCV data optimized for QuestDB
*/
public async writeOHLCV(
symbol: string,
exchange: string,
data: {
timestamp: Date;
open: number;
high: number;
low: number;
close: number;
volume: number;
}[],
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const influxData: InfluxLineData[] = data.map(candle => ({
measurement: 'ohlcv_data',
tags: {
symbol,
exchange,
data_source: 'market_feed'
},
fields: {
open: candle.open,
high: candle.high,
low: candle.low,
close: candle.close,
volume: candle.volume
},
timestamp: candle.timestamp
}));
await this.writePoints(influxData, options);
}
/**
* Write market analytics data
*/
public async writeMarketAnalytics(
symbol: string,
exchange: string,
analytics: {
timestamp: Date;
rsi?: number;
macd?: number;
signal?: number;
histogram?: number;
bollinger_upper?: number;
bollinger_lower?: number;
volume_sma?: number;
},
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const fields: Record<string, number> = {};
// Only include defined values
Object.entries(analytics).forEach(([key, value]) => {
if (key !== 'timestamp' && value !== undefined && value !== null) {
fields[key] = value as number;
}
});
if (Object.keys(fields).length === 0) {
this.logger.warn('No analytics fields to write', { symbol, timestamp: analytics.timestamp });
return;
}
await this.writePoint(
'market_analytics',
{ symbol, exchange },
fields,
analytics.timestamp,
options
);
}
/**
* Write trade execution data
*/
public async writeTradeExecution(
execution: {
symbol: string;
side: 'buy' | 'sell';
quantity: number;
price: number;
timestamp: Date;
executionTime: number;
orderId?: string;
strategy?: string;
},
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const tags: Record<string, string> = {
symbol: execution.symbol,
side: execution.side
};
if (execution.orderId) {
tags.order_id = execution.orderId;
}
if (execution.strategy) {
tags.strategy = execution.strategy;
}
await this.writePoint(
'trade_executions',
tags,
{
quantity: execution.quantity,
price: execution.price,
execution_time: execution.executionTime
},
execution.timestamp,
options
);
}
/**
* Write performance metrics
*/
public async writePerformanceMetrics(
metrics: {
timestamp: Date;
operation: string;
responseTime: number;
success: boolean;
errorCode?: string;
},
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const tags: Record<string, string> = {
operation: metrics.operation,
success: metrics.success.toString()
};
if (metrics.errorCode) {
tags.error_code = metrics.errorCode;
}
await this.writePoint(
'performance_metrics',
tags,
{ response_time: metrics.responseTime },
metrics.timestamp,
options
);
}
/**
* Manually flush the write buffer
*/
public async flush(options?: Partial<InfluxWriteOptions>): Promise<void> {
if (this.writeBuffer.length === 0) {
return;
}
const opts = { ...this.defaultOptions, ...options };
const lines = this.writeBuffer.splice(0); // Clear buffer
if (this.flushTimer) {
clearTimeout(this.flushTimer);
this.flushTimer = null;
}
await this.sendLines(lines, opts);
}
/**
* Get current buffer size
*/
public getBufferSize(): number {
return this.writeBuffer.length;
}
/**
* Clear the buffer without writing
*/
public clearBuffer(): void {
this.writeBuffer.length = 0;
if (this.flushTimer) {
clearTimeout(this.flushTimer);
this.flushTimer = null;
}
}
/**
* Build InfluxDB Line Protocol string
*/
private buildLineProtocol(
measurement: string,
tags: Record<string, string>,
fields: Record<string, number | string | boolean>,
timestamp?: Date
): string {
// Escape special characters in measurement name
const escapedMeasurement = measurement.replace(/[, =]/g, '\\$&');
// Build tags string
const tagString = Object.entries(tags)
.filter(([_, value]) => value !== undefined && value !== null)
.map(([key, value]) => `${this.escapeTagKey(key)}=${this.escapeTagValue(value)}`)
.join(',');
// Build fields string
const fieldString = Object.entries(fields)
.filter(([_, value]) => value !== undefined && value !== null)
.map(([key, value]) => `${this.escapeFieldKey(key)}=${this.formatFieldValue(value)}`)
.join(',');
// Build timestamp
const timestampString = timestamp ?
Math.floor(timestamp.getTime() * 1000000).toString() : // Convert to nanoseconds
'';
// Combine parts
let line = escapedMeasurement;
if (tagString) {
line += `,${tagString}`;
}
line += ` ${fieldString}`;
if (timestampString) {
line += ` ${timestampString}`;
}
return line;
}
/**
* Send lines to QuestDB via HTTP endpoint
*/
private async sendLines(
lines: string[],
options: Required<InfluxWriteOptions>
): Promise<void> {
if (lines.length === 0) {
return;
}
const payload = lines.join('\n');
let attempt = 0;
while (attempt <= options.retryAttempts) {
try {
// QuestDB InfluxDB Line Protocol endpoint
const response = await fetch(`${this.client.getHttpUrl()}/write`, {
method: 'POST',
headers: {
'Content-Type': 'text/plain',
},
body: payload
});
if (!response.ok) {
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
this.logger.debug(`Successfully wrote ${lines.length} lines to QuestDB`);
return;
} catch (error) {
attempt++;
this.logger.error(`Write attempt ${attempt} failed`, {
error,
linesCount: lines.length,
willRetry: attempt <= options.retryAttempts
});
if (attempt <= options.retryAttempts) {
await this.sleep(options.retryDelay * attempt); // Exponential backoff
} else {
throw new Error(`Failed to write to QuestDB after ${options.retryAttempts} attempts: $error`);
}
}
}
}
/**
* Schedule automatic flush
*/
private scheduleFlush(options: Required<InfluxWriteOptions>): void {
if (this.flushTimer || !options.autoFlush) {
return;
}
this.flushTimer = setTimeout(async () => {
try {
await this.flush(options);
} catch (error) {
this.logger.error('Scheduled flush failed', error);
}
}, options.flushInterval);
}
/**
* Format field value for InfluxDB Line Protocol
*/
private formatFieldValue(value: number | string | boolean): string {
if (typeof value === 'string') {
return `"${value.replace(/"/g, '\\"')}"`;
} else if (typeof value === 'boolean') {
return value ? 'true' : 'false';
} else {
return value.toString();
}
}
/**
* Escape tag key
*/
private escapeTagKey(key: string): string {
return key.replace(/[, =]/g, '\\$&');
}
/**
* Escape tag value
*/
private escapeTagValue(value: string): string {
return value.replace(/[, =]/g, '\\$&');
}
/**
* Escape field key
*/
private escapeFieldKey(key: string): string {
return key.replace(/[, =]/g, '\\$&');
}
/**
* Sleep utility
*/
private sleep(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
/**
* Cleanup resources
*/
public destroy(): void {
this.clearBuffer();
this.logger.info('InfluxDB writer destroyed');
}
}
import { getLogger } from '@stock-bot/logger';
import type { BaseTimeSeriesData, InfluxLineData, InfluxWriteOptions } from './types';
// Interface to avoid circular dependency
interface QuestDBClientInterface {
getHttpUrl(): string;
}
/**
* QuestDB InfluxDB Line Protocol Writer
*
* Provides high-performance data ingestion using InfluxDB Line Protocol
* which QuestDB supports natively for optimal time-series data insertion.
*/
export class QuestDBInfluxWriter {
private readonly logger: ReturnType<typeof getLogger>;
private writeBuffer: string[] = [];
private flushTimer: NodeJS.Timeout | null = null;
private readonly defaultOptions: Required<InfluxWriteOptions> = {
batchSize: 1000,
flushInterval: 5000,
autoFlush: true,
precision: 'ms',
retryAttempts: 3,
retryDelay: 1000,
};
constructor(private readonly client: QuestDBClientInterface) {
this.logger = getLogger('questdb-influx-writer');
}
/**
* Write single data point using InfluxDB Line Protocol
*/
public async writePoint(
measurement: string,
tags: Record<string, string>,
fields: Record<string, number | string | boolean>,
timestamp?: Date,
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const line = this.buildLineProtocol(measurement, tags, fields, timestamp);
const opts = { ...this.defaultOptions, ...options };
if (opts.autoFlush && this.writeBuffer.length === 0) {
// Single point write - send immediately
await this.sendLines([line], opts);
} else {
// Add to buffer
this.writeBuffer.push(line);
if (opts.autoFlush) {
this.scheduleFlush(opts);
}
// Flush if buffer is full
if (this.writeBuffer.length >= opts.batchSize) {
await this.flush(opts);
}
}
}
/**
* Write multiple data points
*/
public async writePoints(
data: InfluxLineData[],
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const opts = { ...this.defaultOptions, ...options };
const lines = data.map(point =>
this.buildLineProtocol(point.measurement, point.tags, point.fields, point.timestamp)
);
if (opts.autoFlush) {
// Send immediately for batch writes
await this.sendLines(lines, opts);
} else {
// Add to buffer
this.writeBuffer.push(...lines);
// Flush if buffer exceeds batch size
while (this.writeBuffer.length >= opts.batchSize) {
const batch = this.writeBuffer.splice(0, opts.batchSize);
await this.sendLines(batch, opts);
}
}
}
/**
* Write OHLCV data optimized for QuestDB
*/
public async writeOHLCV(
symbol: string,
exchange: string,
data: {
timestamp: Date;
open: number;
high: number;
low: number;
close: number;
volume: number;
}[],
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const influxData: InfluxLineData[] = data.map(candle => ({
measurement: 'ohlcv_data',
tags: {
symbol,
exchange,
data_source: 'market_feed',
},
fields: {
open: candle.open,
high: candle.high,
low: candle.low,
close: candle.close,
volume: candle.volume,
},
timestamp: candle.timestamp,
}));
await this.writePoints(influxData, options);
}
/**
* Write market analytics data
*/
public async writeMarketAnalytics(
symbol: string,
exchange: string,
analytics: {
timestamp: Date;
rsi?: number;
macd?: number;
signal?: number;
histogram?: number;
bollinger_upper?: number;
bollinger_lower?: number;
volume_sma?: number;
},
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const fields: Record<string, number> = {};
// Only include defined values
Object.entries(analytics).forEach(([key, value]) => {
if (key !== 'timestamp' && value !== undefined && value !== null) {
fields[key] = value as number;
}
});
if (Object.keys(fields).length === 0) {
this.logger.warn('No analytics fields to write', { symbol, timestamp: analytics.timestamp });
return;
}
await this.writePoint(
'market_analytics',
{ symbol, exchange },
fields,
analytics.timestamp,
options
);
}
/**
* Write trade execution data
*/
public async writeTradeExecution(
execution: {
symbol: string;
side: 'buy' | 'sell';
quantity: number;
price: number;
timestamp: Date;
executionTime: number;
orderId?: string;
strategy?: string;
},
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const tags: Record<string, string> = {
symbol: execution.symbol,
side: execution.side,
};
if (execution.orderId) {
tags.order_id = execution.orderId;
}
if (execution.strategy) {
tags.strategy = execution.strategy;
}
await this.writePoint(
'trade_executions',
tags,
{
quantity: execution.quantity,
price: execution.price,
execution_time: execution.executionTime,
},
execution.timestamp,
options
);
}
/**
* Write performance metrics
*/
public async writePerformanceMetrics(
metrics: {
timestamp: Date;
operation: string;
responseTime: number;
success: boolean;
errorCode?: string;
},
options?: Partial<InfluxWriteOptions>
): Promise<void> {
const tags: Record<string, string> = {
operation: metrics.operation,
success: metrics.success.toString(),
};
if (metrics.errorCode) {
tags.error_code = metrics.errorCode;
}
await this.writePoint(
'performance_metrics',
tags,
{ response_time: metrics.responseTime },
metrics.timestamp,
options
);
}
/**
* Manually flush the write buffer
*/
public async flush(options?: Partial<InfluxWriteOptions>): Promise<void> {
if (this.writeBuffer.length === 0) {
return;
}
const opts = { ...this.defaultOptions, ...options };
const lines = this.writeBuffer.splice(0); // Clear buffer
if (this.flushTimer) {
clearTimeout(this.flushTimer);
this.flushTimer = null;
}
await this.sendLines(lines, opts);
}
/**
* Get current buffer size
*/
public getBufferSize(): number {
return this.writeBuffer.length;
}
/**
* Clear the buffer without writing
*/
public clearBuffer(): void {
this.writeBuffer.length = 0;
if (this.flushTimer) {
clearTimeout(this.flushTimer);
this.flushTimer = null;
}
}
/**
* Build InfluxDB Line Protocol string
*/
private buildLineProtocol(
measurement: string,
tags: Record<string, string>,
fields: Record<string, number | string | boolean>,
timestamp?: Date
): string {
// Escape special characters in measurement name
const escapedMeasurement = measurement.replace(/[, =]/g, '\\$&');
// Build tags string
const tagString = Object.entries(tags)
.filter(([_, value]) => value !== undefined && value !== null)
.map(([key, value]) => `${this.escapeTagKey(key)}=${this.escapeTagValue(value)}`)
.join(',');
// Build fields string
const fieldString = Object.entries(fields)
.filter(([_, value]) => value !== undefined && value !== null)
.map(([key, value]) => `${this.escapeFieldKey(key)}=${this.formatFieldValue(value)}`)
.join(',');
// Build timestamp
const timestampString = timestamp
? Math.floor(timestamp.getTime() * 1000000).toString() // Convert to nanoseconds
: '';
// Combine parts
let line = escapedMeasurement;
if (tagString) {
line += `,${tagString}`;
}
line += ` ${fieldString}`;
if (timestampString) {
line += ` ${timestampString}`;
}
return line;
}
/**
* Send lines to QuestDB via HTTP endpoint
*/
private async sendLines(lines: string[], options: Required<InfluxWriteOptions>): Promise<void> {
if (lines.length === 0) {
return;
}
const payload = lines.join('\n');
let attempt = 0;
while (attempt <= options.retryAttempts) {
try {
// QuestDB InfluxDB Line Protocol endpoint
const response = await fetch(`${this.client.getHttpUrl()}/write`, {
method: 'POST',
headers: {
'Content-Type': 'text/plain',
},
body: payload,
});
if (!response.ok) {
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
}
this.logger.debug(`Successfully wrote ${lines.length} lines to QuestDB`);
return;
} catch (error) {
attempt++;
this.logger.error(`Write attempt ${attempt} failed`, {
error,
linesCount: lines.length,
willRetry: attempt <= options.retryAttempts,
});
if (attempt <= options.retryAttempts) {
await this.sleep(options.retryDelay * attempt); // Exponential backoff
} else {
throw new Error(
`Failed to write to QuestDB after ${options.retryAttempts} attempts: $error`
);
}
}
}
}
/**
* Schedule automatic flush
*/
private scheduleFlush(options: Required<InfluxWriteOptions>): void {
if (this.flushTimer || !options.autoFlush) {
return;
}
this.flushTimer = setTimeout(async () => {
try {
await this.flush(options);
} catch (error) {
this.logger.error('Scheduled flush failed', error);
}
}, options.flushInterval);
}
/**
* Format field value for InfluxDB Line Protocol
*/
private formatFieldValue(value: number | string | boolean): string {
if (typeof value === 'string') {
return `"${value.replace(/"/g, '\\"')}"`;
} else if (typeof value === 'boolean') {
return value ? 'true' : 'false';
} else {
return value.toString();
}
}
/**
* Escape tag key
*/
private escapeTagKey(key: string): string {
return key.replace(/[, =]/g, '\\$&');
}
/**
* Escape tag value
*/
private escapeTagValue(value: string): string {
return value.replace(/[, =]/g, '\\$&');
}
/**
* Escape field key
*/
private escapeFieldKey(key: string): string {
return key.replace(/[, =]/g, '\\$&');
}
/**
* Sleep utility
*/
private sleep(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms));
}
/**
* Cleanup resources
*/
public destroy(): void {
this.clearBuffer();
this.logger.info('InfluxDB writer destroyed');
}
}

View file

@ -1,368 +1,376 @@
import { getLogger } from '@stock-bot/logger';
import type {
QueryResult,
TimeSeriesQuery,
AggregationQuery,
TimeRange,
TableNames
} from './types';
// Interface to avoid circular dependency
interface QuestDBClientInterface {
query<T = any>(sql: string, params?: any[]): Promise<QueryResult<T>>;
}
/**
* QuestDB Query Builder
*
* Provides a fluent interface for building optimized time-series queries
* with support for QuestDB-specific functions and optimizations.
*/
export class QuestDBQueryBuilder {
private readonly logger: ReturnType<typeof getLogger>;
private query!: {
select: string[];
from: string;
where: string[];
groupBy: string[];
orderBy: string[];
limit?: number;
sampleBy?: string;
latestBy?: string[];
timeRange?: TimeRange;
};
constructor(private readonly client: QuestDBClientInterface) {
this.logger = getLogger('questdb-query-builder');
this.reset();
}
/**
* Reset the query builder
*/
private reset(): QuestDBQueryBuilder {
this.query = {
select: [],
from: '',
where: [],
groupBy: [],
orderBy: [],
sampleBy: undefined,
latestBy: undefined,
timeRange: undefined
};
return this;
}
/**
* Start a new query
*/
public static create(client: QuestDBClientInterface): QuestDBQueryBuilder {
return new QuestDBQueryBuilder(client);
}
/**
* Select columns
*/
public select(...columns: string[]): QuestDBQueryBuilder {
this.query.select.push(...columns);
return this;
}
/**
* Select with aggregation functions
*/
public selectAgg(aggregations: Record<string, string>): QuestDBQueryBuilder {
Object.entries(aggregations).forEach(([alias, expression]) => {
this.query.select.push(`${expression} as ${alias}`);
});
return this;
}
/**
* From table
*/
public from(table: TableNames | string): QuestDBQueryBuilder {
this.query.from = table;
return this;
}
/**
* Where condition
*/
public where(condition: string): QuestDBQueryBuilder {
this.query.where.push(condition);
return this;
}
/**
* Where symbol equals
*/
public whereSymbol(symbol: string): QuestDBQueryBuilder {
this.query.where.push(`symbol = '${symbol}'`);
return this;
}
/**
* Where symbols in list
*/
public whereSymbolIn(symbols: string[]): QuestDBQueryBuilder {
const symbolList = symbols.map(s => `'${s}'`).join(', ');
this.query.where.push(`symbol IN (${symbolList})`);
return this;
}
/**
* Where exchange equals
*/
public whereExchange(exchange: string): QuestDBQueryBuilder {
this.query.where.push(`exchange = '${exchange}'`);
return this;
}
/**
* Time range filter
*/
public whereTimeRange(startTime: Date, endTime: Date): QuestDBQueryBuilder {
this.query.timeRange = { startTime, endTime };
this.query.where.push(
`timestamp >= '${startTime.toISOString()}' AND timestamp <= '${endTime.toISOString()}'`
);
return this;
}
/**
* Last N hours
*/
public whereLastHours(hours: number): QuestDBQueryBuilder {
this.query.where.push(`timestamp > dateadd('h', -${hours}, now())`);
return this;
}
/**
* Last N days
*/
public whereLastDays(days: number): QuestDBQueryBuilder {
this.query.where.push(`timestamp > dateadd('d', -${days}, now())`);
return this;
}
/**
* Group by columns
*/
public groupBy(...columns: string[]): QuestDBQueryBuilder {
this.query.groupBy.push(...columns);
return this;
}
/**
* Order by column
*/
public orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): QuestDBQueryBuilder {
this.query.orderBy.push(`${column} ${direction}`);
return this;
}
/**
* Order by timestamp descending (most recent first)
*/
public orderByTimeDesc(): QuestDBQueryBuilder {
this.query.orderBy.push('timestamp DESC');
return this;
}
/**
* Limit results
*/
public limit(count: number): QuestDBQueryBuilder {
this.query.limit = count;
return this;
}
/**
* Sample by time interval (QuestDB specific)
*/
public sampleBy(interval: string): QuestDBQueryBuilder {
this.query.sampleBy = interval;
return this;
}
/**
* Latest by columns (QuestDB specific)
*/
public latestBy(...columns: string[]): QuestDBQueryBuilder {
this.query.latestBy = columns;
return this;
}
/**
* Build and execute the query
*/
public async execute<T = any>(): Promise<QueryResult<T>> {
const sql = this.build();
this.logger.debug('Executing query', { sql });
try {
const result = await this.client.query<T>(sql);
this.reset(); // Reset for next query
return result;
} catch (error) {
this.logger.error('Query execution failed', { sql, error });
this.reset(); // Reset even on error
throw error;
}
}
/**
* Build the SQL query string
*/
public build(): string {
if (!this.query.from) {
throw new Error('FROM clause is required');
}
if (this.query.select.length === 0) {
this.query.select.push('*');
}
let sql = `SELECT ${this.query.select.join(', ')} FROM ${this.query.from}`;
// Add WHERE clause
if (this.query.where.length > 0) {
sql += ` WHERE ${this.query.where.join(' AND ')}`;
}
// Add LATEST BY (QuestDB specific - must come before GROUP BY)
if (this.query.latestBy && this.query.latestBy.length > 0) {
sql += ` LATEST BY ${this.query.latestBy.join(', ')}`;
}
// Add SAMPLE BY (QuestDB specific)
if (this.query.sampleBy) {
sql += ` SAMPLE BY ${this.query.sampleBy}`;
}
// Add GROUP BY
if (this.query.groupBy.length > 0) {
sql += ` GROUP BY ${this.query.groupBy.join(', ')}`;
}
// Add ORDER BY
if (this.query.orderBy.length > 0) {
sql += ` ORDER BY ${this.query.orderBy.join(', ')}`;
}
// Add LIMIT
if (this.query.limit) {
sql += ` LIMIT ${this.query.limit}`;
}
return sql;
}
/**
* Get the built query without executing
*/
public toSQL(): string {
return this.build();
}
// Predefined query methods for common use cases
/**
* Get latest OHLCV data for symbols
*/
public static latestOHLCV(
client: QuestDBClientInterface,
symbols: string[],
exchange?: string
): QuestDBQueryBuilder {
const builder = QuestDBQueryBuilder.create(client)
.select('symbol', 'timestamp', 'open', 'high', 'low', 'close', 'volume')
.from('ohlcv_data')
.whereSymbolIn(symbols)
.latestBy('symbol')
.orderByTimeDesc();
if (exchange) {
builder.whereExchange(exchange);
}
return builder;
}
/**
* Get OHLCV data with time sampling
*/
public static ohlcvTimeSeries(
client: QuestDBClientInterface,
symbol: string,
interval: string,
hours: number = 24
): QuestDBQueryBuilder {
return QuestDBQueryBuilder.create(client)
.selectAgg({
'first_open': 'first(open)',
'max_high': 'max(high)',
'min_low': 'min(low)',
'last_close': 'last(close)',
'sum_volume': 'sum(volume)'
})
.from('ohlcv_data')
.whereSymbol(symbol)
.whereLastHours(hours)
.sampleBy(interval)
.orderByTimeDesc();
}
/**
* Get market analytics data
*/
public static marketAnalytics(
client: QuestDBClientInterface,
symbols: string[],
hours: number = 1
): QuestDBQueryBuilder {
return QuestDBQueryBuilder.create(client)
.select('symbol', 'timestamp', 'rsi', 'macd', 'bollinger_upper', 'bollinger_lower', 'volume_sma')
.from('market_analytics')
.whereSymbolIn(symbols)
.whereLastHours(hours)
.orderBy('symbol')
.orderByTimeDesc();
}
/**
* Get performance metrics for a time range
*/
public static performanceMetrics(
client: QuestDBClientInterface,
startTime: Date,
endTime: Date
): QuestDBQueryBuilder {
return QuestDBQueryBuilder.create(client)
.selectAgg({
'total_trades': 'count(*)',
'avg_response_time': 'avg(response_time)',
'max_response_time': 'max(response_time)',
'error_rate': 'sum(case when success = false then 1 else 0 end) * 100.0 / count(*)'
})
.from('performance_metrics')
.whereTimeRange(startTime, endTime)
.sampleBy('1m');
}
/**
* Get trade execution data
*/
public static tradeExecutions(
client: QuestDBClientInterface,
symbol?: string,
hours: number = 24
): QuestDBQueryBuilder {
const builder = QuestDBQueryBuilder.create(client)
.select('symbol', 'timestamp', 'side', 'quantity', 'price', 'execution_time')
.from('trade_executions')
.whereLastHours(hours)
.orderByTimeDesc();
if (symbol) {
builder.whereSymbol(symbol);
}
return builder;
}
}
import { getLogger } from '@stock-bot/logger';
import type {
AggregationQuery,
QueryResult,
TableNames,
TimeRange,
TimeSeriesQuery,
} from './types';
// Interface to avoid circular dependency
interface QuestDBClientInterface {
query<T = any>(sql: string, params?: any[]): Promise<QueryResult<T>>;
}
/**
* QuestDB Query Builder
*
* Provides a fluent interface for building optimized time-series queries
* with support for QuestDB-specific functions and optimizations.
*/
export class QuestDBQueryBuilder {
private readonly logger: ReturnType<typeof getLogger>;
private query!: {
select: string[];
from: string;
where: string[];
groupBy: string[];
orderBy: string[];
limit?: number;
sampleBy?: string;
latestBy?: string[];
timeRange?: TimeRange;
};
constructor(private readonly client: QuestDBClientInterface) {
this.logger = getLogger('questdb-query-builder');
this.reset();
}
/**
* Reset the query builder
*/
private reset(): QuestDBQueryBuilder {
this.query = {
select: [],
from: '',
where: [],
groupBy: [],
orderBy: [],
sampleBy: undefined,
latestBy: undefined,
timeRange: undefined,
};
return this;
}
/**
* Start a new query
*/
public static create(client: QuestDBClientInterface): QuestDBQueryBuilder {
return new QuestDBQueryBuilder(client);
}
/**
* Select columns
*/
public select(...columns: string[]): QuestDBQueryBuilder {
this.query.select.push(...columns);
return this;
}
/**
* Select with aggregation functions
*/
public selectAgg(aggregations: Record<string, string>): QuestDBQueryBuilder {
Object.entries(aggregations).forEach(([alias, expression]) => {
this.query.select.push(`${expression} as ${alias}`);
});
return this;
}
/**
* From table
*/
public from(table: TableNames | string): QuestDBQueryBuilder {
this.query.from = table;
return this;
}
/**
* Where condition
*/
public where(condition: string): QuestDBQueryBuilder {
this.query.where.push(condition);
return this;
}
/**
* Where symbol equals
*/
public whereSymbol(symbol: string): QuestDBQueryBuilder {
this.query.where.push(`symbol = '${symbol}'`);
return this;
}
/**
* Where symbols in list
*/
public whereSymbolIn(symbols: string[]): QuestDBQueryBuilder {
const symbolList = symbols.map(s => `'${s}'`).join(', ');
this.query.where.push(`symbol IN (${symbolList})`);
return this;
}
/**
* Where exchange equals
*/
public whereExchange(exchange: string): QuestDBQueryBuilder {
this.query.where.push(`exchange = '${exchange}'`);
return this;
}
/**
* Time range filter
*/
public whereTimeRange(startTime: Date, endTime: Date): QuestDBQueryBuilder {
this.query.timeRange = { startTime, endTime };
this.query.where.push(
`timestamp >= '${startTime.toISOString()}' AND timestamp <= '${endTime.toISOString()}'`
);
return this;
}
/**
* Last N hours
*/
public whereLastHours(hours: number): QuestDBQueryBuilder {
this.query.where.push(`timestamp > dateadd('h', -${hours}, now())`);
return this;
}
/**
* Last N days
*/
public whereLastDays(days: number): QuestDBQueryBuilder {
this.query.where.push(`timestamp > dateadd('d', -${days}, now())`);
return this;
}
/**
* Group by columns
*/
public groupBy(...columns: string[]): QuestDBQueryBuilder {
this.query.groupBy.push(...columns);
return this;
}
/**
* Order by column
*/
public orderBy(column: string, direction: 'ASC' | 'DESC' = 'ASC'): QuestDBQueryBuilder {
this.query.orderBy.push(`${column} ${direction}`);
return this;
}
/**
* Order by timestamp descending (most recent first)
*/
public orderByTimeDesc(): QuestDBQueryBuilder {
this.query.orderBy.push('timestamp DESC');
return this;
}
/**
* Limit results
*/
public limit(count: number): QuestDBQueryBuilder {
this.query.limit = count;
return this;
}
/**
* Sample by time interval (QuestDB specific)
*/
public sampleBy(interval: string): QuestDBQueryBuilder {
this.query.sampleBy = interval;
return this;
}
/**
* Latest by columns (QuestDB specific)
*/
public latestBy(...columns: string[]): QuestDBQueryBuilder {
this.query.latestBy = columns;
return this;
}
/**
* Build and execute the query
*/
public async execute<T = any>(): Promise<QueryResult<T>> {
const sql = this.build();
this.logger.debug('Executing query', { sql });
try {
const result = await this.client.query<T>(sql);
this.reset(); // Reset for next query
return result;
} catch (error) {
this.logger.error('Query execution failed', { sql, error });
this.reset(); // Reset even on error
throw error;
}
}
/**
* Build the SQL query string
*/
public build(): string {
if (!this.query.from) {
throw new Error('FROM clause is required');
}
if (this.query.select.length === 0) {
this.query.select.push('*');
}
let sql = `SELECT ${this.query.select.join(', ')} FROM ${this.query.from}`;
// Add WHERE clause
if (this.query.where.length > 0) {
sql += ` WHERE ${this.query.where.join(' AND ')}`;
}
// Add LATEST BY (QuestDB specific - must come before GROUP BY)
if (this.query.latestBy && this.query.latestBy.length > 0) {
sql += ` LATEST BY ${this.query.latestBy.join(', ')}`;
}
// Add SAMPLE BY (QuestDB specific)
if (this.query.sampleBy) {
sql += ` SAMPLE BY ${this.query.sampleBy}`;
}
// Add GROUP BY
if (this.query.groupBy.length > 0) {
sql += ` GROUP BY ${this.query.groupBy.join(', ')}`;
}
// Add ORDER BY
if (this.query.orderBy.length > 0) {
sql += ` ORDER BY ${this.query.orderBy.join(', ')}`;
}
// Add LIMIT
if (this.query.limit) {
sql += ` LIMIT ${this.query.limit}`;
}
return sql;
}
/**
* Get the built query without executing
*/
public toSQL(): string {
return this.build();
}
// Predefined query methods for common use cases
/**
* Get latest OHLCV data for symbols
*/
public static latestOHLCV(
client: QuestDBClientInterface,
symbols: string[],
exchange?: string
): QuestDBQueryBuilder {
const builder = QuestDBQueryBuilder.create(client)
.select('symbol', 'timestamp', 'open', 'high', 'low', 'close', 'volume')
.from('ohlcv_data')
.whereSymbolIn(symbols)
.latestBy('symbol')
.orderByTimeDesc();
if (exchange) {
builder.whereExchange(exchange);
}
return builder;
}
/**
* Get OHLCV data with time sampling
*/
public static ohlcvTimeSeries(
client: QuestDBClientInterface,
symbol: string,
interval: string,
hours: number = 24
): QuestDBQueryBuilder {
return QuestDBQueryBuilder.create(client)
.selectAgg({
first_open: 'first(open)',
max_high: 'max(high)',
min_low: 'min(low)',
last_close: 'last(close)',
sum_volume: 'sum(volume)',
})
.from('ohlcv_data')
.whereSymbol(symbol)
.whereLastHours(hours)
.sampleBy(interval)
.orderByTimeDesc();
}
/**
* Get market analytics data
*/
public static marketAnalytics(
client: QuestDBClientInterface,
symbols: string[],
hours: number = 1
): QuestDBQueryBuilder {
return QuestDBQueryBuilder.create(client)
.select(
'symbol',
'timestamp',
'rsi',
'macd',
'bollinger_upper',
'bollinger_lower',
'volume_sma'
)
.from('market_analytics')
.whereSymbolIn(symbols)
.whereLastHours(hours)
.orderBy('symbol')
.orderByTimeDesc();
}
/**
* Get performance metrics for a time range
*/
public static performanceMetrics(
client: QuestDBClientInterface,
startTime: Date,
endTime: Date
): QuestDBQueryBuilder {
return QuestDBQueryBuilder.create(client)
.selectAgg({
total_trades: 'count(*)',
avg_response_time: 'avg(response_time)',
max_response_time: 'max(response_time)',
error_rate: 'sum(case when success = false then 1 else 0 end) * 100.0 / count(*)',
})
.from('performance_metrics')
.whereTimeRange(startTime, endTime)
.sampleBy('1m');
}
/**
* Get trade execution data
*/
public static tradeExecutions(
client: QuestDBClientInterface,
symbol?: string,
hours: number = 24
): QuestDBQueryBuilder {
const builder = QuestDBQueryBuilder.create(client)
.select('symbol', 'timestamp', 'side', 'quantity', 'price', 'execution_time')
.from('trade_executions')
.whereLastHours(hours)
.orderByTimeDesc();
if (symbol) {
builder.whereSymbol(symbol);
}
return builder;
}
}

View file

@ -1,404 +1,404 @@
import { getLogger } from '@stock-bot/logger';
import type { TableSchema, IndexDefinition, TableNames, QueryResult } from './types';
// Interface to avoid circular dependency
interface QuestDBClientInterface {
query<T = any>(sql: string, params?: any[]): Promise<QueryResult<T>>;
}
/**
* QuestDB Schema Manager
*
* Manages database schemas, table creation, and optimization
* for time-series data storage in QuestDB.
*/
export class QuestDBSchemaManager {
private readonly logger: ReturnType<typeof getLogger>;
private readonly schemas: Map<string, TableSchema> = new Map();
constructor(private readonly client: QuestDBClientInterface) {
this.logger = getLogger('questdb-schema-manager');
this.initializeSchemas();
}
/**
* Initialize predefined schemas
*/
private initializeSchemas(): void {
// OHLCV Data Table
this.schemas.set('ohlcv_data', {
tableName: 'ohlcv_data',
columns: [
{ name: 'symbol', type: 'SYMBOL', nullable: false },
{ name: 'exchange', type: 'SYMBOL', nullable: false },
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'open', type: 'DOUBLE', nullable: false },
{ name: 'high', type: 'DOUBLE', nullable: false },
{ name: 'low', type: 'DOUBLE', nullable: false },
{ name: 'close', type: 'DOUBLE', nullable: false },
{ name: 'volume', type: 'LONG', nullable: false },
{ name: 'data_source', type: 'SYMBOL', nullable: true }
],
partitionBy: 'DAY',
orderBy: ['symbol', 'timestamp'],
indices: [
{ columns: ['symbol'], type: 'HASH' },
{ columns: ['exchange'], type: 'HASH' }
]
});
// Market Analytics Table
this.schemas.set('market_analytics', {
tableName: 'market_analytics',
columns: [
{ name: 'symbol', type: 'SYMBOL', nullable: false },
{ name: 'exchange', type: 'SYMBOL', nullable: false },
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'rsi', type: 'DOUBLE', nullable: true },
{ name: 'macd', type: 'DOUBLE', nullable: true },
{ name: 'signal', type: 'DOUBLE', nullable: true },
{ name: 'histogram', type: 'DOUBLE', nullable: true },
{ name: 'bollinger_upper', type: 'DOUBLE', nullable: true },
{ name: 'bollinger_lower', type: 'DOUBLE', nullable: true },
{ name: 'volume_sma', type: 'DOUBLE', nullable: true },
{ name: 'timeframe', type: 'SYMBOL', nullable: true }
],
partitionBy: 'DAY',
orderBy: ['symbol', 'timestamp'],
indices: [
{ columns: ['symbol'], type: 'HASH' },
{ columns: ['timeframe'], type: 'HASH' }
]
});
// Trade Executions Table
this.schemas.set('trade_executions', {
tableName: 'trade_executions',
columns: [
{ name: 'symbol', type: 'SYMBOL', nullable: false },
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'side', type: 'SYMBOL', nullable: false },
{ name: 'quantity', type: 'DOUBLE', nullable: false },
{ name: 'price', type: 'DOUBLE', nullable: false },
{ name: 'execution_time', type: 'LONG', nullable: false },
{ name: 'order_id', type: 'SYMBOL', nullable: true },
{ name: 'strategy', type: 'SYMBOL', nullable: true },
{ name: 'commission', type: 'DOUBLE', nullable: true }
],
partitionBy: 'DAY',
orderBy: ['symbol', 'timestamp'],
indices: [
{ columns: ['symbol'], type: 'HASH' },
{ columns: ['order_id'], type: 'HASH' },
{ columns: ['strategy'], type: 'HASH' }
]
});
// Performance Metrics Table
this.schemas.set('performance_metrics', {
tableName: 'performance_metrics',
columns: [
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'operation', type: 'SYMBOL', nullable: false },
{ name: 'response_time', type: 'LONG', nullable: false },
{ name: 'success', type: 'BOOLEAN', nullable: false },
{ name: 'error_code', type: 'SYMBOL', nullable: true },
{ name: 'component', type: 'SYMBOL', nullable: true }
],
partitionBy: 'HOUR',
orderBy: ['operation', 'timestamp'],
indices: [
{ columns: ['operation'], type: 'HASH' },
{ columns: ['success'], type: 'HASH' }
]
});
// Portfolio Positions Table
this.schemas.set('portfolio_positions', {
tableName: 'portfolio_positions',
columns: [
{ name: 'portfolio_id', type: 'SYMBOL', nullable: false },
{ name: 'symbol', type: 'SYMBOL', nullable: false },
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'quantity', type: 'DOUBLE', nullable: false },
{ name: 'avg_cost', type: 'DOUBLE', nullable: false },
{ name: 'market_value', type: 'DOUBLE', nullable: false },
{ name: 'unrealized_pnl', type: 'DOUBLE', nullable: false },
{ name: 'realized_pnl', type: 'DOUBLE', nullable: false }
],
partitionBy: 'DAY',
orderBy: ['portfolio_id', 'symbol', 'timestamp'],
indices: [
{ columns: ['portfolio_id'], type: 'HASH' },
{ columns: ['symbol'], type: 'HASH' }
]
});
// Risk Metrics Table
this.schemas.set('risk_metrics', {
tableName: 'risk_metrics',
columns: [
{ name: 'portfolio_id', type: 'SYMBOL', nullable: false },
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'var_1d', type: 'DOUBLE', nullable: true },
{ name: 'var_5d', type: 'DOUBLE', nullable: true },
{ name: 'expected_shortfall', type: 'DOUBLE', nullable: true },
{ name: 'beta', type: 'DOUBLE', nullable: true },
{ name: 'sharpe_ratio', type: 'DOUBLE', nullable: true },
{ name: 'max_drawdown', type: 'DOUBLE', nullable: true },
{ name: 'volatility', type: 'DOUBLE', nullable: true }
],
partitionBy: 'DAY',
orderBy: ['portfolio_id', 'timestamp'],
indices: [
{ columns: ['portfolio_id'], type: 'HASH' }
]
});
}
/**
* Create all tables
*/
public async createAllTables(): Promise<void> {
this.logger.info('Creating all QuestDB tables');
for (const [tableName, schema] of this.schemas) {
try {
await this.createTable(schema);
this.logger.info(`Table ${tableName} created successfully`);
} catch (error) {
this.logger.error(`Failed to create table ${tableName}`, error);
throw error;
}
}
}
/**
* Create a single table
*/
public async createTable(schema: TableSchema): Promise<void> {
const sql = this.buildCreateTableSQL(schema);
try {
await this.client.query(sql);
this.logger.info(`Table ${schema.tableName} created`, { sql });
} catch (error) {
// Check if table already exists
if (error instanceof Error && error.message.includes('already exists')) {
this.logger.info(`Table ${schema.tableName} already exists`);
return;
}
throw error;
}
}
/**
* Drop a table
*/
public async dropTable(tableName: string): Promise<void> {
const sql = `DROP TABLE IF EXISTS ${tableName}`;
try {
await this.client.query(sql);
this.logger.info(`Table ${tableName} dropped`);
} catch (error) {
this.logger.error(`Failed to drop table ${tableName}`, error);
throw error;
}
}
/**
* Check if table exists
*/
public async tableExists(tableName: string): Promise<boolean> {
try {
const result = await this.client.query(`
SELECT COUNT(*) as count
FROM information_schema.tables
WHERE table_name = '${tableName}'
`);
return result.rows.length > 0 && result.rows[0].count > 0;
} catch (error) {
this.logger.error(`Error checking if table exists: ${tableName}`, error);
return false;
}
}
/**
* Get table schema
*/
public getSchema(tableName: string): TableSchema | undefined {
return this.schemas.get(tableName);
}
/**
* Add custom schema
*/
public addSchema(schema: TableSchema): void {
this.schemas.set(schema.tableName, schema);
this.logger.info(`Schema added for table: ${schema.tableName}`);
}
/**
* Get all schema names
*/
public getSchemaNames(): string[] {
return Array.from(this.schemas.keys());
}
/**
* Optimize table (rebuild indices, etc.)
*/
public async optimizeTable(tableName: string): Promise<void> {
const schema = this.schemas.get(tableName);
if (!schema) {
throw new Error(`Schema not found for table: ${tableName}`);
}
// QuestDB automatically optimizes, but we can analyze table stats
try {
const stats = await this.getTableStats(tableName);
this.logger.info(`Table ${tableName} stats`, stats);
} catch (error) {
this.logger.error(`Failed to optimize table ${tableName}`, error);
throw error;
}
}
/**
* Get table statistics
*/
public async getTableStats(tableName: string): Promise<any> {
try {
const result = await this.client.query(`
SELECT
COUNT(*) as row_count,
MIN(timestamp) as min_timestamp,
MAX(timestamp) as max_timestamp
FROM ${tableName}
`);
return result.rows[0] || {};
} catch (error) {
this.logger.error(`Failed to get table stats for ${tableName}`, error);
throw error;
}
}
/**
* Truncate table (remove all data but keep structure)
*/
public async truncateTable(tableName: string): Promise<void> {
try {
await this.client.query(`TRUNCATE TABLE ${tableName}`);
this.logger.info(`Table ${tableName} truncated`);
} catch (error) {
this.logger.error(`Failed to truncate table ${tableName}`, error);
throw error;
}
}
/**
* Create table partitions for future dates
*/
public async createPartitions(tableName: string, days: number = 30): Promise<void> {
// QuestDB handles partitioning automatically based on the PARTITION BY clause
// This method is for future extensibility
this.logger.info(`Partitioning is automatic for table ${tableName}`);
}
/**
* Build CREATE TABLE SQL statement
*/
private buildCreateTableSQL(schema: TableSchema): string {
const columns = schema.columns.map(col => {
let columnDef = `${col.name} ${col.type}`;
if (!col.nullable) {
columnDef += ' NOT NULL';
}
return columnDef;
}).join(', ');
let sql = `CREATE TABLE IF NOT EXISTS ${schema.tableName} (${columns})`;
// Add designated timestamp
const timestampColumn = schema.columns.find(col => col.designated);
if (timestampColumn) {
sql += ` timestamp(${timestampColumn.name})`;
}
// Add partition by
if (schema.partitionBy) {
sql += ` PARTITION BY ${schema.partitionBy}`;
}
return sql;
}
/**
* Build index creation SQL (for future use)
*/
private buildCreateIndexSQL(tableName: string, index: IndexDefinition): string {
const indexName = `idx_${tableName}_${index.columns.join('_')}`;
const columns = index.columns.join(', ');
// QuestDB uses different index syntax, this is for future compatibility
return `CREATE INDEX ${indexName} ON ${tableName} (${columns})`;
}
/**
* Validate schema definition
*/
private validateSchema(schema: TableSchema): void {
if (!schema.tableName) {
throw new Error('Table name is required');
}
if (!schema.columns || schema.columns.length === 0) {
throw new Error('At least one column is required');
}
const timestampColumns = schema.columns.filter(col => col.designated);
if (timestampColumns.length > 1) {
throw new Error('Only one designated timestamp column is allowed');
}
if (timestampColumns.length === 0) {
throw new Error('A designated timestamp column is required for time-series tables');
}
}
/**
* Get table creation status
*/
public async getTableCreationStatus(): Promise<Record<string, boolean>> {
const status: Record<string, boolean> = {};
for (const tableName of this.schemas.keys()) {
status[tableName] = await this.tableExists(tableName);
}
return status;
}
/**
* Initialize database schema
*/
public async initializeDatabase(): Promise<void> {
this.logger.info('Initializing QuestDB schema');
// Validate all schemas first
for (const schema of this.schemas.values()) {
this.validateSchema(schema);
}
// Create all tables
await this.createAllTables();
// Get creation status
const status = await this.getTableCreationStatus();
this.logger.info('Database initialization complete', { tableStatus: status });
}
}
import { getLogger } from '@stock-bot/logger';
import type { IndexDefinition, QueryResult, TableNames, TableSchema } from './types';
// Interface to avoid circular dependency
interface QuestDBClientInterface {
query<T = any>(sql: string, params?: any[]): Promise<QueryResult<T>>;
}
/**
* QuestDB Schema Manager
*
* Manages database schemas, table creation, and optimization
* for time-series data storage in QuestDB.
*/
export class QuestDBSchemaManager {
private readonly logger: ReturnType<typeof getLogger>;
private readonly schemas: Map<string, TableSchema> = new Map();
constructor(private readonly client: QuestDBClientInterface) {
this.logger = getLogger('questdb-schema-manager');
this.initializeSchemas();
}
/**
* Initialize predefined schemas
*/
private initializeSchemas(): void {
// OHLCV Data Table
this.schemas.set('ohlcv_data', {
tableName: 'ohlcv_data',
columns: [
{ name: 'symbol', type: 'SYMBOL', nullable: false },
{ name: 'exchange', type: 'SYMBOL', nullable: false },
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'open', type: 'DOUBLE', nullable: false },
{ name: 'high', type: 'DOUBLE', nullable: false },
{ name: 'low', type: 'DOUBLE', nullable: false },
{ name: 'close', type: 'DOUBLE', nullable: false },
{ name: 'volume', type: 'LONG', nullable: false },
{ name: 'data_source', type: 'SYMBOL', nullable: true },
],
partitionBy: 'DAY',
orderBy: ['symbol', 'timestamp'],
indices: [
{ columns: ['symbol'], type: 'HASH' },
{ columns: ['exchange'], type: 'HASH' },
],
});
// Market Analytics Table
this.schemas.set('market_analytics', {
tableName: 'market_analytics',
columns: [
{ name: 'symbol', type: 'SYMBOL', nullable: false },
{ name: 'exchange', type: 'SYMBOL', nullable: false },
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'rsi', type: 'DOUBLE', nullable: true },
{ name: 'macd', type: 'DOUBLE', nullable: true },
{ name: 'signal', type: 'DOUBLE', nullable: true },
{ name: 'histogram', type: 'DOUBLE', nullable: true },
{ name: 'bollinger_upper', type: 'DOUBLE', nullable: true },
{ name: 'bollinger_lower', type: 'DOUBLE', nullable: true },
{ name: 'volume_sma', type: 'DOUBLE', nullable: true },
{ name: 'timeframe', type: 'SYMBOL', nullable: true },
],
partitionBy: 'DAY',
orderBy: ['symbol', 'timestamp'],
indices: [
{ columns: ['symbol'], type: 'HASH' },
{ columns: ['timeframe'], type: 'HASH' },
],
});
// Trade Executions Table
this.schemas.set('trade_executions', {
tableName: 'trade_executions',
columns: [
{ name: 'symbol', type: 'SYMBOL', nullable: false },
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'side', type: 'SYMBOL', nullable: false },
{ name: 'quantity', type: 'DOUBLE', nullable: false },
{ name: 'price', type: 'DOUBLE', nullable: false },
{ name: 'execution_time', type: 'LONG', nullable: false },
{ name: 'order_id', type: 'SYMBOL', nullable: true },
{ name: 'strategy', type: 'SYMBOL', nullable: true },
{ name: 'commission', type: 'DOUBLE', nullable: true },
],
partitionBy: 'DAY',
orderBy: ['symbol', 'timestamp'],
indices: [
{ columns: ['symbol'], type: 'HASH' },
{ columns: ['order_id'], type: 'HASH' },
{ columns: ['strategy'], type: 'HASH' },
],
});
// Performance Metrics Table
this.schemas.set('performance_metrics', {
tableName: 'performance_metrics',
columns: [
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'operation', type: 'SYMBOL', nullable: false },
{ name: 'response_time', type: 'LONG', nullable: false },
{ name: 'success', type: 'BOOLEAN', nullable: false },
{ name: 'error_code', type: 'SYMBOL', nullable: true },
{ name: 'component', type: 'SYMBOL', nullable: true },
],
partitionBy: 'HOUR',
orderBy: ['operation', 'timestamp'],
indices: [
{ columns: ['operation'], type: 'HASH' },
{ columns: ['success'], type: 'HASH' },
],
});
// Portfolio Positions Table
this.schemas.set('portfolio_positions', {
tableName: 'portfolio_positions',
columns: [
{ name: 'portfolio_id', type: 'SYMBOL', nullable: false },
{ name: 'symbol', type: 'SYMBOL', nullable: false },
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'quantity', type: 'DOUBLE', nullable: false },
{ name: 'avg_cost', type: 'DOUBLE', nullable: false },
{ name: 'market_value', type: 'DOUBLE', nullable: false },
{ name: 'unrealized_pnl', type: 'DOUBLE', nullable: false },
{ name: 'realized_pnl', type: 'DOUBLE', nullable: false },
],
partitionBy: 'DAY',
orderBy: ['portfolio_id', 'symbol', 'timestamp'],
indices: [
{ columns: ['portfolio_id'], type: 'HASH' },
{ columns: ['symbol'], type: 'HASH' },
],
});
// Risk Metrics Table
this.schemas.set('risk_metrics', {
tableName: 'risk_metrics',
columns: [
{ name: 'portfolio_id', type: 'SYMBOL', nullable: false },
{ name: 'timestamp', type: 'TIMESTAMP', nullable: false, designated: true },
{ name: 'var_1d', type: 'DOUBLE', nullable: true },
{ name: 'var_5d', type: 'DOUBLE', nullable: true },
{ name: 'expected_shortfall', type: 'DOUBLE', nullable: true },
{ name: 'beta', type: 'DOUBLE', nullable: true },
{ name: 'sharpe_ratio', type: 'DOUBLE', nullable: true },
{ name: 'max_drawdown', type: 'DOUBLE', nullable: true },
{ name: 'volatility', type: 'DOUBLE', nullable: true },
],
partitionBy: 'DAY',
orderBy: ['portfolio_id', 'timestamp'],
indices: [{ columns: ['portfolio_id'], type: 'HASH' }],
});
}
/**
* Create all tables
*/
public async createAllTables(): Promise<void> {
this.logger.info('Creating all QuestDB tables');
for (const [tableName, schema] of this.schemas) {
try {
await this.createTable(schema);
this.logger.info(`Table ${tableName} created successfully`);
} catch (error) {
this.logger.error(`Failed to create table ${tableName}`, error);
throw error;
}
}
}
/**
* Create a single table
*/
public async createTable(schema: TableSchema): Promise<void> {
const sql = this.buildCreateTableSQL(schema);
try {
await this.client.query(sql);
this.logger.info(`Table ${schema.tableName} created`, { sql });
} catch (error) {
// Check if table already exists
if (error instanceof Error && error.message.includes('already exists')) {
this.logger.info(`Table ${schema.tableName} already exists`);
return;
}
throw error;
}
}
/**
* Drop a table
*/
public async dropTable(tableName: string): Promise<void> {
const sql = `DROP TABLE IF EXISTS ${tableName}`;
try {
await this.client.query(sql);
this.logger.info(`Table ${tableName} dropped`);
} catch (error) {
this.logger.error(`Failed to drop table ${tableName}`, error);
throw error;
}
}
/**
* Check if table exists
*/
public async tableExists(tableName: string): Promise<boolean> {
try {
const result = await this.client.query(`
SELECT COUNT(*) as count
FROM information_schema.tables
WHERE table_name = '${tableName}'
`);
return result.rows.length > 0 && result.rows[0].count > 0;
} catch (error) {
this.logger.error(`Error checking if table exists: ${tableName}`, error);
return false;
}
}
/**
* Get table schema
*/
public getSchema(tableName: string): TableSchema | undefined {
return this.schemas.get(tableName);
}
/**
* Add custom schema
*/
public addSchema(schema: TableSchema): void {
this.schemas.set(schema.tableName, schema);
this.logger.info(`Schema added for table: ${schema.tableName}`);
}
/**
* Get all schema names
*/
public getSchemaNames(): string[] {
return Array.from(this.schemas.keys());
}
/**
* Optimize table (rebuild indices, etc.)
*/
public async optimizeTable(tableName: string): Promise<void> {
const schema = this.schemas.get(tableName);
if (!schema) {
throw new Error(`Schema not found for table: ${tableName}`);
}
// QuestDB automatically optimizes, but we can analyze table stats
try {
const stats = await this.getTableStats(tableName);
this.logger.info(`Table ${tableName} stats`, stats);
} catch (error) {
this.logger.error(`Failed to optimize table ${tableName}`, error);
throw error;
}
}
/**
* Get table statistics
*/
public async getTableStats(tableName: string): Promise<any> {
try {
const result = await this.client.query(`
SELECT
COUNT(*) as row_count,
MIN(timestamp) as min_timestamp,
MAX(timestamp) as max_timestamp
FROM ${tableName}
`);
return result.rows[0] || {};
} catch (error) {
this.logger.error(`Failed to get table stats for ${tableName}`, error);
throw error;
}
}
/**
* Truncate table (remove all data but keep structure)
*/
public async truncateTable(tableName: string): Promise<void> {
try {
await this.client.query(`TRUNCATE TABLE ${tableName}`);
this.logger.info(`Table ${tableName} truncated`);
} catch (error) {
this.logger.error(`Failed to truncate table ${tableName}`, error);
throw error;
}
}
/**
* Create table partitions for future dates
*/
public async createPartitions(tableName: string, days: number = 30): Promise<void> {
// QuestDB handles partitioning automatically based on the PARTITION BY clause
// This method is for future extensibility
this.logger.info(`Partitioning is automatic for table ${tableName}`);
}
/**
* Build CREATE TABLE SQL statement
*/
private buildCreateTableSQL(schema: TableSchema): string {
const columns = schema.columns
.map(col => {
let columnDef = `${col.name} ${col.type}`;
if (!col.nullable) {
columnDef += ' NOT NULL';
}
return columnDef;
})
.join(', ');
let sql = `CREATE TABLE IF NOT EXISTS ${schema.tableName} (${columns})`;
// Add designated timestamp
const timestampColumn = schema.columns.find(col => col.designated);
if (timestampColumn) {
sql += ` timestamp(${timestampColumn.name})`;
}
// Add partition by
if (schema.partitionBy) {
sql += ` PARTITION BY ${schema.partitionBy}`;
}
return sql;
}
/**
* Build index creation SQL (for future use)
*/
private buildCreateIndexSQL(tableName: string, index: IndexDefinition): string {
const indexName = `idx_${tableName}_${index.columns.join('_')}`;
const columns = index.columns.join(', ');
// QuestDB uses different index syntax, this is for future compatibility
return `CREATE INDEX ${indexName} ON ${tableName} (${columns})`;
}
/**
* Validate schema definition
*/
private validateSchema(schema: TableSchema): void {
if (!schema.tableName) {
throw new Error('Table name is required');
}
if (!schema.columns || schema.columns.length === 0) {
throw new Error('At least one column is required');
}
const timestampColumns = schema.columns.filter(col => col.designated);
if (timestampColumns.length > 1) {
throw new Error('Only one designated timestamp column is allowed');
}
if (timestampColumns.length === 0) {
throw new Error('A designated timestamp column is required for time-series tables');
}
}
/**
* Get table creation status
*/
public async getTableCreationStatus(): Promise<Record<string, boolean>> {
const status: Record<string, boolean> = {};
for (const tableName of this.schemas.keys()) {
status[tableName] = await this.tableExists(tableName);
}
return status;
}
/**
* Initialize database schema
*/
public async initializeDatabase(): Promise<void> {
this.logger.info('Initializing QuestDB schema');
// Validate all schemas first
for (const schema of this.schemas.values()) {
this.validateSchema(schema);
}
// Create all tables
await this.createAllTables();
// Get creation status
const status = await this.getTableCreationStatus();
this.logger.info('Database initialization complete', { tableStatus: status });
}
}

View file

@ -1,284 +1,304 @@
/**
* QuestDB Client Configuration and Types
*/
/**
* QuestDB Client Configuration
*/
export interface QuestDBClientConfig {
host: string;
httpPort: number;
pgPort: number;
influxPort: number;
user?: string;
password?: string;
database?: string;
tls?: {
enabled: boolean;
verifyServerCert: boolean;
};
timeouts?: {
connection: number;
request: number;
};
retryAttempts?: number;
}
/**
* QuestDB Connection Options
*/
export interface QuestDBConnectionOptions {
protocol?: 'http' | 'pg' | 'influx';
retryAttempts?: number;
retryDelay?: number;
healthCheckInterval?: number;
}
/**
* Health Status Types
*/
export type QuestDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy';
export interface QuestDBHealthCheck {
status: QuestDBHealthStatus;
timestamp: Date;
latency: number;
protocols: {
http: boolean;
pg: boolean;
influx: boolean;
};
errors?: string[];
}
export interface QuestDBMetrics {
queriesPerSecond: number;
insertsPerSecond: number;
averageQueryTime: number;
errorRate: number;
dataIngestionRate: number;
storageSize: number;
}
/**
* Table Names for Time-Series Data
*/
export type TableNames =
| 'ohlcv'
| 'trades'
| 'quotes'
| 'indicators'
| 'performance'
| 'risk_metrics'
| 'market_events'
| 'strategy_signals'
| 'portfolio_snapshots';
/**
* Time-Series Data Types
*/
export interface BaseTimeSeriesData {
timestamp: Date;
symbol?: string;
}
export interface OHLCVData extends BaseTimeSeriesData {
open: number;
high: number;
low: number;
close: number;
volume: number;
timeframe: string; // '1m', '5m', '1h', '1d', etc.
source: string;
}
export interface TradeData extends BaseTimeSeriesData {
trade_id: string;
price: number;
quantity: number;
side: 'buy' | 'sell';
exchange: string;
conditions?: string[];
}
export interface QuoteData extends BaseTimeSeriesData {
bid_price: number;
bid_size: number;
ask_price: number;
ask_size: number;
exchange: string;
spread: number;
}
export interface IndicatorData extends BaseTimeSeriesData {
indicator_name: string;
value: number;
parameters?: Record<string, any>;
timeframe: string;
}
export interface PerformanceData extends BaseTimeSeriesData {
portfolio_id: string;
total_value: number;
cash_balance: number;
unrealized_pnl: number;
realized_pnl: number;
daily_return: number;
cumulative_return: number;
}
export interface RiskMetrics extends BaseTimeSeriesData {
portfolio_id?: string;
strategy_id?: string;
metric_name: string;
value: number;
threshold?: number;
status: 'normal' | 'warning' | 'breach';
}
/**
* Query Result Types
*/
export interface QueryResult<T = any> {
rows: T[];
rowCount: number;
executionTime: number;
metadata?: {
columns: Array<{
name: string;
type: string;
}>;
};
}
export interface InsertResult {
rowsInserted: number;
executionTime: number;
errors?: string[];
}
/**
* Schema Definition Types
*/
export interface ColumnDefinition {
name: string;
type: 'SYMBOL' | 'STRING' | 'DOUBLE' | 'FLOAT' | 'LONG' | 'INT' | 'BOOLEAN' | 'TIMESTAMP' | 'DATE' | 'BINARY';
indexed?: boolean;
capacity?: number; // For SYMBOL type
}
export interface TableDefinition {
name: string;
columns: ColumnDefinition[];
partitionBy?: 'NONE' | 'DAY' | 'MONTH' | 'YEAR';
timestamp?: string; // Column name to use as designated timestamp
dedup?: boolean;
}
/**
* Connection Pool Types
*/
export interface ConnectionPoolConfig {
minConnections: number;
maxConnections: number;
idleTimeout: number;
acquireTimeout: number;
}
/**
* Health Monitoring Types
*/
export interface HealthStatus {
isHealthy: boolean;
lastCheck: Date;
responseTime: number;
message: string;
error?: Error;
details?: {
pgPool: boolean;
httpEndpoint: boolean;
uptime: number;
};
}
export interface PerformanceMetrics {
totalQueries: number;
successfulQueries: number;
failedQueries: number;
averageResponseTime: number;
lastQueryTime: Date | null;
connectionUptime: number;
memoryUsage: number;
}
/**
* Query Builder Types
*/
export interface TimeSeriesQuery {
table: TableNames | string;
columns?: string[];
timeRange?: TimeRange;
groupBy?: string[];
aggregations?: Record<string, string>;
sampleBy?: string;
latestBy?: string[];
orderBy?: Array<{ column: string; direction: 'ASC' | 'DESC' }>;
limit?: number;
}
export interface AggregationQuery {
aggregations: Record<string, string>;
groupBy?: string[];
having?: string[];
}
export interface TimeRange {
startTime: Date;
endTime: Date;
}
/**
* InfluxDB Line Protocol Types
*/
export interface InfluxLineData {
measurement: string;
tags: Record<string, string>;
fields: Record<string, number | string | boolean>;
timestamp?: Date;
}
export interface InfluxWriteOptions {
batchSize?: number;
flushInterval?: number;
autoFlush?: boolean;
precision?: 'ns' | 'us' | 'ms' | 's';
retryAttempts?: number;
retryDelay?: number;
}
/**
* Schema Management Types
*/
export interface TableSchema {
tableName: string;
columns: ColumnSchema[];
partitionBy?: 'NONE' | 'HOUR' | 'DAY' | 'MONTH' | 'YEAR';
orderBy?: string[];
indices?: IndexDefinition[];
dedup?: boolean;
}
export interface ColumnSchema {
name: string;
type: 'SYMBOL' | 'STRING' | 'DOUBLE' | 'FLOAT' | 'LONG' | 'INT' | 'BOOLEAN' | 'TIMESTAMP' | 'DATE' | 'BINARY';
nullable?: boolean;
designated?: boolean; // For designated timestamp column
capacity?: number; // For SYMBOL type
indexed?: boolean;
}
export interface IndexDefinition {
columns: string[];
type: 'HASH' | 'BTREE';
unique?: boolean;
}
/**
* QuestDB Client Configuration and Types
*/
/**
* QuestDB Client Configuration
*/
export interface QuestDBClientConfig {
host: string;
httpPort: number;
pgPort: number;
influxPort: number;
user?: string;
password?: string;
database?: string;
tls?: {
enabled: boolean;
verifyServerCert: boolean;
};
timeouts?: {
connection: number;
request: number;
};
retryAttempts?: number;
}
/**
* QuestDB Connection Options
*/
export interface QuestDBConnectionOptions {
protocol?: 'http' | 'pg' | 'influx';
retryAttempts?: number;
retryDelay?: number;
healthCheckInterval?: number;
}
/**
* Health Status Types
*/
export type QuestDBHealthStatus = 'healthy' | 'degraded' | 'unhealthy';
export interface QuestDBHealthCheck {
status: QuestDBHealthStatus;
timestamp: Date;
latency: number;
protocols: {
http: boolean;
pg: boolean;
influx: boolean;
};
errors?: string[];
}
export interface QuestDBMetrics {
queriesPerSecond: number;
insertsPerSecond: number;
averageQueryTime: number;
errorRate: number;
dataIngestionRate: number;
storageSize: number;
}
/**
* Table Names for Time-Series Data
*/
export type TableNames =
| 'ohlcv'
| 'trades'
| 'quotes'
| 'indicators'
| 'performance'
| 'risk_metrics'
| 'market_events'
| 'strategy_signals'
| 'portfolio_snapshots';
/**
* Time-Series Data Types
*/
export interface BaseTimeSeriesData {
timestamp: Date;
symbol?: string;
}
export interface OHLCVData extends BaseTimeSeriesData {
open: number;
high: number;
low: number;
close: number;
volume: number;
timeframe: string; // '1m', '5m', '1h', '1d', etc.
source: string;
}
export interface TradeData extends BaseTimeSeriesData {
trade_id: string;
price: number;
quantity: number;
side: 'buy' | 'sell';
exchange: string;
conditions?: string[];
}
export interface QuoteData extends BaseTimeSeriesData {
bid_price: number;
bid_size: number;
ask_price: number;
ask_size: number;
exchange: string;
spread: number;
}
export interface IndicatorData extends BaseTimeSeriesData {
indicator_name: string;
value: number;
parameters?: Record<string, any>;
timeframe: string;
}
export interface PerformanceData extends BaseTimeSeriesData {
portfolio_id: string;
total_value: number;
cash_balance: number;
unrealized_pnl: number;
realized_pnl: number;
daily_return: number;
cumulative_return: number;
}
export interface RiskMetrics extends BaseTimeSeriesData {
portfolio_id?: string;
strategy_id?: string;
metric_name: string;
value: number;
threshold?: number;
status: 'normal' | 'warning' | 'breach';
}
/**
* Query Result Types
*/
export interface QueryResult<T = any> {
rows: T[];
rowCount: number;
executionTime: number;
metadata?: {
columns: Array<{
name: string;
type: string;
}>;
};
}
export interface InsertResult {
rowsInserted: number;
executionTime: number;
errors?: string[];
}
/**
* Schema Definition Types
*/
export interface ColumnDefinition {
name: string;
type:
| 'SYMBOL'
| 'STRING'
| 'DOUBLE'
| 'FLOAT'
| 'LONG'
| 'INT'
| 'BOOLEAN'
| 'TIMESTAMP'
| 'DATE'
| 'BINARY';
indexed?: boolean;
capacity?: number; // For SYMBOL type
}
export interface TableDefinition {
name: string;
columns: ColumnDefinition[];
partitionBy?: 'NONE' | 'DAY' | 'MONTH' | 'YEAR';
timestamp?: string; // Column name to use as designated timestamp
dedup?: boolean;
}
/**
* Connection Pool Types
*/
export interface ConnectionPoolConfig {
minConnections: number;
maxConnections: number;
idleTimeout: number;
acquireTimeout: number;
}
/**
* Health Monitoring Types
*/
export interface HealthStatus {
isHealthy: boolean;
lastCheck: Date;
responseTime: number;
message: string;
error?: Error;
details?: {
pgPool: boolean;
httpEndpoint: boolean;
uptime: number;
};
}
export interface PerformanceMetrics {
totalQueries: number;
successfulQueries: number;
failedQueries: number;
averageResponseTime: number;
lastQueryTime: Date | null;
connectionUptime: number;
memoryUsage: number;
}
/**
* Query Builder Types
*/
export interface TimeSeriesQuery {
table: TableNames | string;
columns?: string[];
timeRange?: TimeRange;
groupBy?: string[];
aggregations?: Record<string, string>;
sampleBy?: string;
latestBy?: string[];
orderBy?: Array<{ column: string; direction: 'ASC' | 'DESC' }>;
limit?: number;
}
export interface AggregationQuery {
aggregations: Record<string, string>;
groupBy?: string[];
having?: string[];
}
export interface TimeRange {
startTime: Date;
endTime: Date;
}
/**
* InfluxDB Line Protocol Types
*/
export interface InfluxLineData {
measurement: string;
tags: Record<string, string>;
fields: Record<string, number | string | boolean>;
timestamp?: Date;
}
export interface InfluxWriteOptions {
batchSize?: number;
flushInterval?: number;
autoFlush?: boolean;
precision?: 'ns' | 'us' | 'ms' | 's';
retryAttempts?: number;
retryDelay?: number;
}
/**
* Schema Management Types
*/
export interface TableSchema {
tableName: string;
columns: ColumnSchema[];
partitionBy?: 'NONE' | 'HOUR' | 'DAY' | 'MONTH' | 'YEAR';
orderBy?: string[];
indices?: IndexDefinition[];
dedup?: boolean;
}
export interface ColumnSchema {
name: string;
type:
| 'SYMBOL'
| 'STRING'
| 'DOUBLE'
| 'FLOAT'
| 'LONG'
| 'INT'
| 'BOOLEAN'
| 'TIMESTAMP'
| 'DATE'
| 'BINARY';
nullable?: boolean;
designated?: boolean; // For designated timestamp column
capacity?: number; // For SYMBOL type
indexed?: boolean;
}
export interface IndexDefinition {
columns: string[];
type: 'HASH' | 'BTREE';
unique?: boolean;
}

View file

@ -1,239 +1,251 @@
/**
* QuestDB Client Integration Test
*
* This test validates that all components work together correctly
* without requiring an actual QuestDB instance.
*/
import { describe, it, expect, beforeEach, afterEach, mock, spyOn } from 'bun:test';
import {
QuestDBClient,
QuestDBHealthMonitor,
QuestDBQueryBuilder,
QuestDBInfluxWriter,
QuestDBSchemaManager,
createQuestDBClient
} from '../src';
import { questdbTestHelpers } from './setup';
describe('QuestDB Client Integration', () => {
let client: QuestDBClient; beforeEach(() => {
client = new QuestDBClient({
host: 'localhost',
httpPort: 9000,
pgPort: 8812,
influxPort: 9009,
database: 'questdb',
user: 'admin',
password: 'quest'
});
}); afterEach(async () => {
if (client && client.connected) {
try {
await client.disconnect();
} catch (error) {
// Ignore cleanup errors in tests
}
}
});
describe('Client Initialization', () => {
it('should create client with factory function', () => {
const factoryClient = createQuestDBClient();
expect(factoryClient).toBeInstanceOf(QuestDBClient);
});
it('should initialize all supporting classes', () => {
expect(client.getHealthMonitor()).toBeInstanceOf(QuestDBHealthMonitor);
expect(client.queryBuilder()).toBeInstanceOf(QuestDBQueryBuilder);
expect(client.getInfluxWriter()).toBeInstanceOf(QuestDBInfluxWriter);
expect(client.getSchemaManager()).toBeInstanceOf(QuestDBSchemaManager);
});
it('should handle connection configuration', () => {
expect(client.getHttpUrl()).toBe('http://localhost:9000');
expect(client.getInfluxUrl()).toBe('http://localhost:9009');
expect(client.connected).toBe(false);
});
});
describe('Query Builder', () => {
it('should build query using query builder', () => {
const query = client.queryBuilder()
.select('symbol', 'close', 'timestamp')
.from('ohlcv')
.whereSymbol('AAPL')
.whereLastHours(24)
.orderBy('timestamp', 'DESC')
.limit(100)
.build();
expect(query).toContain('SELECT symbol, close, timestamp');
expect(query).toContain('FROM ohlcv');
expect(query).toContain("symbol = 'AAPL'");
expect(query).toContain('ORDER BY timestamp DESC');
expect(query).toContain('LIMIT 100');
expect(questdbTestHelpers.validateQuestDBQuery(query)).toBe(true);
});
it('should build time-series specific queries', () => {
const latestQuery = client.queryBuilder()
.select('*')
.from('ohlcv')
.latestBy('symbol')
.build();
expect(latestQuery).toContain('LATEST BY symbol');
expect(questdbTestHelpers.validateQuestDBQuery(latestQuery)).toBe(true);
const sampleQuery = client.queryBuilder()
.select('symbol', 'avg(close)')
.from('ohlcv')
.sampleBy('1d')
.build();
expect(sampleQuery).toContain('SAMPLE BY 1d');
expect(questdbTestHelpers.validateQuestDBQuery(sampleQuery)).toBe(true);
});
it('should build aggregation queries', () => {
const query = client.aggregate('ohlcv')
.select('symbol', 'avg(close) as avg_price', 'max(high) as max_high')
.whereSymbolIn(['AAPL', 'GOOGL'])
.groupBy('symbol')
.sampleBy('1h')
.build();
expect(query).toContain('SELECT symbol, avg(close) as avg_price, max(high) as max_high');
expect(query).toContain('FROM ohlcv');
expect(query).toContain("symbol IN ('AAPL', 'GOOGL')");
expect(query).toContain('SAMPLE BY 1h');
expect(query).toContain('GROUP BY symbol');
expect(questdbTestHelpers.validateQuestDBQuery(query)).toBe(true);
});
});
describe('InfluxDB Writer', () => {
it('should write OHLCV data using InfluxDB line protocol', async () => {
const ohlcvData = [{
timestamp: new Date('2024-01-01T12:00:00Z'),
open: 150.00,
high: 152.00,
low: 149.50,
close: 151.50,
volume: 1000000
}];
// Mock the actual write operation
const writeSpy = spyOn(client.getInfluxWriter(), 'writeOHLCV');
writeSpy.mockReturnValue(Promise.resolve()); await expect(async () => {
await client.writeOHLCV('AAPL', 'NASDAQ', ohlcvData);
}).not.toThrow();
});
it('should handle batch operations', () => {
const lines = questdbTestHelpers.generateInfluxDBLines(3);
expect(lines.length).toBe(3);
lines.forEach(line => {
expect(line).toContain('ohlcv,symbol=TEST');
expect(line).toMatch(/\d{19}$/); // Nanosecond timestamp
}); });
});
describe('Schema Manager', () => {
it('should provide schema access', () => {
const schema = client.getSchemaManager().getSchema('ohlcv_data');
expect(schema).toBeDefined();
expect(schema?.tableName).toBe('ohlcv_data');
const symbolColumn = schema?.columns.find(col => col.name === 'symbol');
expect(symbolColumn).toBeDefined();
expect(symbolColumn?.type).toBe('SYMBOL');
expect(schema?.partitionBy).toBe('DAY'); });
});
describe('Health Monitor', () => {
it('should provide health monitoring capabilities', async () => {
const healthMonitor = client.getHealthMonitor();
expect(healthMonitor).toBeInstanceOf(QuestDBHealthMonitor);
// Mock health status since we're not connected
const mockHealthStatus = {
isHealthy: false,
lastCheck: new Date(),
responseTime: 100,
message: 'Connection not established',
details: {
pgPool: false,
httpEndpoint: false,
uptime: 0 }
};
const healthSpy = spyOn(healthMonitor, 'getHealthStatus');
healthSpy.mockReturnValue(Promise.resolve(mockHealthStatus));
const health = await healthMonitor.getHealthStatus();
expect(health.isHealthy).toBe(false);
expect(health.lastCheck).toBeInstanceOf(Date);
expect(health.message).toBe('Connection not established');
});
});
describe('Time-Series Operations', () => {
it('should support latest by operations', async () => {
// Mock the query execution
const mockResult = {
rows: [{ symbol: 'AAPL', close: 150.00, timestamp: new Date() }],
rowCount: 1,
executionTime: 10,
metadata: { columns: [] }
};
const querySpy = spyOn(client, 'query');
querySpy.mockReturnValue(Promise.resolve(mockResult));
const result = await client.latestBy('ohlcv', ['symbol', 'close'], 'symbol'); expect(result.rows.length).toBe(1);
expect(result.rows[0].symbol).toBe('AAPL');
});
it('should support sample by operations', async () => {
// Mock the query execution
const mockResult = {
rows: [
{ symbol: 'AAPL', avg_close: 150.00, timestamp: new Date() }
],
rowCount: 1,
executionTime: 15,
metadata: { columns: [] }
};
const querySpy = spyOn(client, 'query');
querySpy.mockReturnValue(Promise.resolve(mockResult)); const result = await client.sampleBy(
'ohlcv',
['symbol', 'avg(close) as avg_close'],
'1h',
'timestamp',
"symbol = 'AAPL'"
);
expect(result.rows.length).toBe(1);
expect(result.executionTime).toBe(15);
});
});
describe('Connection Management', () => {
it('should handle connection configuration', () => {
expect(client.getHttpUrl()).toBe('http://localhost:9000');
expect(client.getInfluxUrl()).toBe('http://localhost:9009');
expect(client.connected).toBe(false);
});
it('should provide configuration access', () => {
const config = client.configuration;
expect(config.host).toBe('localhost');
expect(config.httpPort).toBe(9000);
expect(config.user).toBe('admin');
});
});
});
/**
* QuestDB Client Integration Test
*
* This test validates that all components work together correctly
* without requiring an actual QuestDB instance.
*/
import { afterEach, beforeEach, describe, expect, it, mock, spyOn } from 'bun:test';
import {
createQuestDBClient,
QuestDBClient,
QuestDBHealthMonitor,
QuestDBInfluxWriter,
QuestDBQueryBuilder,
QuestDBSchemaManager,
} from '../src';
import { questdbTestHelpers } from './setup';
describe('QuestDB Client Integration', () => {
let client: QuestDBClient;
beforeEach(() => {
client = new QuestDBClient({
host: 'localhost',
httpPort: 9000,
pgPort: 8812,
influxPort: 9009,
database: 'questdb',
user: 'admin',
password: 'quest',
});
});
afterEach(async () => {
if (client && client.connected) {
try {
await client.disconnect();
} catch (error) {
// Ignore cleanup errors in tests
}
}
});
describe('Client Initialization', () => {
it('should create client with factory function', () => {
const factoryClient = createQuestDBClient();
expect(factoryClient).toBeInstanceOf(QuestDBClient);
});
it('should initialize all supporting classes', () => {
expect(client.getHealthMonitor()).toBeInstanceOf(QuestDBHealthMonitor);
expect(client.queryBuilder()).toBeInstanceOf(QuestDBQueryBuilder);
expect(client.getInfluxWriter()).toBeInstanceOf(QuestDBInfluxWriter);
expect(client.getSchemaManager()).toBeInstanceOf(QuestDBSchemaManager);
});
it('should handle connection configuration', () => {
expect(client.getHttpUrl()).toBe('http://localhost:9000');
expect(client.getInfluxUrl()).toBe('http://localhost:9009');
expect(client.connected).toBe(false);
});
});
describe('Query Builder', () => {
it('should build query using query builder', () => {
const query = client
.queryBuilder()
.select('symbol', 'close', 'timestamp')
.from('ohlcv')
.whereSymbol('AAPL')
.whereLastHours(24)
.orderBy('timestamp', 'DESC')
.limit(100)
.build();
expect(query).toContain('SELECT symbol, close, timestamp');
expect(query).toContain('FROM ohlcv');
expect(query).toContain("symbol = 'AAPL'");
expect(query).toContain('ORDER BY timestamp DESC');
expect(query).toContain('LIMIT 100');
expect(questdbTestHelpers.validateQuestDBQuery(query)).toBe(true);
});
it('should build time-series specific queries', () => {
const latestQuery = client
.queryBuilder()
.select('*')
.from('ohlcv')
.latestBy('symbol')
.build();
expect(latestQuery).toContain('LATEST BY symbol');
expect(questdbTestHelpers.validateQuestDBQuery(latestQuery)).toBe(true);
const sampleQuery = client
.queryBuilder()
.select('symbol', 'avg(close)')
.from('ohlcv')
.sampleBy('1d')
.build();
expect(sampleQuery).toContain('SAMPLE BY 1d');
expect(questdbTestHelpers.validateQuestDBQuery(sampleQuery)).toBe(true);
});
it('should build aggregation queries', () => {
const query = client
.aggregate('ohlcv')
.select('symbol', 'avg(close) as avg_price', 'max(high) as max_high')
.whereSymbolIn(['AAPL', 'GOOGL'])
.groupBy('symbol')
.sampleBy('1h')
.build();
expect(query).toContain('SELECT symbol, avg(close) as avg_price, max(high) as max_high');
expect(query).toContain('FROM ohlcv');
expect(query).toContain("symbol IN ('AAPL', 'GOOGL')");
expect(query).toContain('SAMPLE BY 1h');
expect(query).toContain('GROUP BY symbol');
expect(questdbTestHelpers.validateQuestDBQuery(query)).toBe(true);
});
});
describe('InfluxDB Writer', () => {
it('should write OHLCV data using InfluxDB line protocol', async () => {
const ohlcvData = [
{
timestamp: new Date('2024-01-01T12:00:00Z'),
open: 150.0,
high: 152.0,
low: 149.5,
close: 151.5,
volume: 1000000,
},
];
// Mock the actual write operation
const writeSpy = spyOn(client.getInfluxWriter(), 'writeOHLCV');
writeSpy.mockReturnValue(Promise.resolve());
await expect(async () => {
await client.writeOHLCV('AAPL', 'NASDAQ', ohlcvData);
}).not.toThrow();
});
it('should handle batch operations', () => {
const lines = questdbTestHelpers.generateInfluxDBLines(3);
expect(lines.length).toBe(3);
lines.forEach(line => {
expect(line).toContain('ohlcv,symbol=TEST');
expect(line).toMatch(/\d{19}$/); // Nanosecond timestamp
});
});
});
describe('Schema Manager', () => {
it('should provide schema access', () => {
const schema = client.getSchemaManager().getSchema('ohlcv_data');
expect(schema).toBeDefined();
expect(schema?.tableName).toBe('ohlcv_data');
const symbolColumn = schema?.columns.find(col => col.name === 'symbol');
expect(symbolColumn).toBeDefined();
expect(symbolColumn?.type).toBe('SYMBOL');
expect(schema?.partitionBy).toBe('DAY');
});
});
describe('Health Monitor', () => {
it('should provide health monitoring capabilities', async () => {
const healthMonitor = client.getHealthMonitor();
expect(healthMonitor).toBeInstanceOf(QuestDBHealthMonitor);
// Mock health status since we're not connected
const mockHealthStatus = {
isHealthy: false,
lastCheck: new Date(),
responseTime: 100,
message: 'Connection not established',
details: {
pgPool: false,
httpEndpoint: false,
uptime: 0,
},
};
const healthSpy = spyOn(healthMonitor, 'getHealthStatus');
healthSpy.mockReturnValue(Promise.resolve(mockHealthStatus));
const health = await healthMonitor.getHealthStatus();
expect(health.isHealthy).toBe(false);
expect(health.lastCheck).toBeInstanceOf(Date);
expect(health.message).toBe('Connection not established');
});
});
describe('Time-Series Operations', () => {
it('should support latest by operations', async () => {
// Mock the query execution
const mockResult = {
rows: [{ symbol: 'AAPL', close: 150.0, timestamp: new Date() }],
rowCount: 1,
executionTime: 10,
metadata: { columns: [] },
};
const querySpy = spyOn(client, 'query');
querySpy.mockReturnValue(Promise.resolve(mockResult));
const result = await client.latestBy('ohlcv', ['symbol', 'close'], 'symbol');
expect(result.rows.length).toBe(1);
expect(result.rows[0].symbol).toBe('AAPL');
});
it('should support sample by operations', async () => {
// Mock the query execution
const mockResult = {
rows: [{ symbol: 'AAPL', avg_close: 150.0, timestamp: new Date() }],
rowCount: 1,
executionTime: 15,
metadata: { columns: [] },
};
const querySpy = spyOn(client, 'query');
querySpy.mockReturnValue(Promise.resolve(mockResult));
const result = await client.sampleBy(
'ohlcv',
['symbol', 'avg(close) as avg_close'],
'1h',
'timestamp',
"symbol = 'AAPL'"
);
expect(result.rows.length).toBe(1);
expect(result.executionTime).toBe(15);
});
});
describe('Connection Management', () => {
it('should handle connection configuration', () => {
expect(client.getHttpUrl()).toBe('http://localhost:9000');
expect(client.getInfluxUrl()).toBe('http://localhost:9009');
expect(client.connected).toBe(false);
});
it('should provide configuration access', () => {
const config = client.configuration;
expect(config.host).toBe('localhost');
expect(config.httpPort).toBe(9000);
expect(config.user).toBe('admin');
});
});
});

View file

@ -1,284 +1,280 @@
/**
* QuestDB Client Test Setup
*
* Setup file specific to QuestDB client library tests.
* Provides utilities and mocks for testing database operations.
*/
import { newDb } from 'pg-mem';
import { mock, spyOn, beforeAll, beforeEach } from 'bun:test';
// Mock PostgreSQL database for unit tests
let pgMem: any;
beforeAll(() => {
// Create in-memory PostgreSQL database
pgMem = newDb();
// Register QuestDB-specific functions
pgMem.public.registerFunction({
name: 'now',
implementation: () => new Date().toISOString()
});
pgMem.public.registerFunction({
name: 'dateadd',
args: [{ type: 'text' }, { type: 'int' }, { type: 'timestamp' }],
returns: 'timestamp',
implementation: (unit: string, amount: number, date: Date) => {
const result = new Date(date);
switch (unit) {
case 'd':
case 'day':
result.setDate(result.getDate() + amount);
break;
case 'h':
case 'hour':
result.setHours(result.getHours() + amount);
break;
case 'm':
case 'minute':
result.setMinutes(result.getMinutes() + amount);
break;
default:
throw new Error(`Unsupported date unit: ${unit}`);
}
return result;
} }); // Mock QuestDB HTTP client
// Mock fetch using Bun's built-in mock
(global as any).fetch = mock(() => {});
// Mock the logger module to avoid Pino configuration conflicts
mock.module('@stock-bot/logger', () => ({
Logger: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
child: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
}))
})),
getLogger: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
child: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
}))
}))
}));
// Mock Pino and its transports to avoid configuration conflicts
mock.module('pino', () => ({
default: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
child: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
}))
}))
}));
mock.module('pino-pretty', () => ({
default: mock(() => ({}))
}));
mock.module('pino-loki', () => ({
default: mock(() => ({}))
}));
});
beforeEach(() => {
// Reset database state
if (pgMem) {
try {
pgMem.public.none('DROP TABLE IF EXISTS ohlcv CASCADE');
pgMem.public.none('DROP TABLE IF EXISTS trades CASCADE');
pgMem.public.none('DROP TABLE IF EXISTS quotes CASCADE');
pgMem.public.none('DROP TABLE IF EXISTS indicators CASCADE');
pgMem.public.none('DROP TABLE IF EXISTS performance CASCADE');
pgMem.public.none('DROP TABLE IF EXISTS risk_metrics CASCADE');
} catch (error) {
// Tables might not exist, ignore errors
}
} // Reset fetch mock
if ((global as any).fetch) {
((global as any).fetch as any).mockClear?.();
}
});
/**
* QuestDB-specific test utilities
*/
export const questdbTestHelpers = {
/**
* Get mock PostgreSQL adapter
*/
getMockPgAdapter: () => pgMem?.adapters?.createPg?.(),
/**
* Execute SQL in mock database
*/
executeMockSQL: (sql: string, params?: any[]) => {
return pgMem?.public?.query(sql, params);
},
/**
* Mock successful QuestDB HTTP response
*/ mockQuestDBHttpSuccess: (data: any) => {
((global as any).fetch as any).mockResolvedValue?.({
ok: true,
status: 200,
json: async () => data,
text: async () => JSON.stringify(data)
});
},
/**
* Mock QuestDB HTTP error
*/ mockQuestDBHttpError: (status: number, message: string) => {
((global as any).fetch as any).mockResolvedValue?.({
ok: false,
status,
json: async () => ({ error: message }),
text: async () => message
});
},
/**
* Mock InfluxDB line protocol response
*/ mockInfluxDBSuccess: () => {
((global as any).fetch as any).mockResolvedValue?.({
ok: true,
status: 204,
text: async () => ''
});
},
/**
* Create test OHLCV table
*/
createTestOHLCVTable: () => {
const sql = `
CREATE TABLE ohlcv (
symbol VARCHAR(10),
timestamp TIMESTAMP,
open DECIMAL(10,2),
high DECIMAL(10,2),
low DECIMAL(10,2),
close DECIMAL(10,2),
volume BIGINT,
source VARCHAR(50)
)
`;
return pgMem?.public?.none(sql);
},
/**
* Insert test OHLCV data
*/
insertTestOHLCVData: (data: any[]) => {
const sql = `
INSERT INTO ohlcv (symbol, timestamp, open, high, low, close, volume, source)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
`;
return Promise.all(
data.map(row =>
pgMem?.public?.none(sql, [
row.symbol,
row.timestamp,
row.open,
row.high,
row.low,
row.close,
row.volume,
row.source || 'test'
])
)
);
},
/**
* Generate InfluxDB line protocol test data
*/
generateInfluxDBLines: (count: number = 5) => {
const lines: string[] = [];
const baseTime = Date.now() * 1000000; // Convert to nanoseconds
for (let i = 0; i < count; i++) {
const time = baseTime + (i * 60000000000); // 1 minute intervals
const price = 150 + Math.random() * 10;
lines.push(
`ohlcv,symbol=TEST open=${price},high=${price + 1},low=${price - 1},close=${price + 0.5},volume=1000i ${time}`
);
}
return lines;
},
/**
* Validate QuestDB query syntax
*/
validateQuestDBQuery: (query: string): boolean => {
// Basic validation for QuestDB-specific syntax
const questdbKeywords = [
'SAMPLE BY',
'LATEST BY',
'ASOF JOIN',
'SPLICE JOIN',
'LT JOIN'
];
// Check for valid SQL structure
const hasSelect = /SELECT\s+/i.test(query);
const hasFrom = /FROM\s+/i.test(query);
return hasSelect && hasFrom;
},
/**
* Mock connection pool
*/ createMockPool: () => {
const mockQuery = () => Promise.resolve({ rows: [], rowCount: 0 });
const mockRelease = () => {};
const mockConnect = () => Promise.resolve({
query: mockQuery,
release: mockRelease
});
const mockEnd = () => Promise.resolve(undefined);
return {
connect: mockConnect,
end: mockEnd,
totalCount: 0,
idleCount: 0,
waitingCount: 0
};
}
};
/**
* QuestDB Client Test Setup
*
* Setup file specific to QuestDB client library tests.
* Provides utilities and mocks for testing database operations.
*/
import { beforeAll, beforeEach, mock, spyOn } from 'bun:test';
import { newDb } from 'pg-mem';
// Mock PostgreSQL database for unit tests
let pgMem: any;
beforeAll(() => {
// Create in-memory PostgreSQL database
pgMem = newDb();
// Register QuestDB-specific functions
pgMem.public.registerFunction({
name: 'now',
implementation: () => new Date().toISOString(),
});
pgMem.public.registerFunction({
name: 'dateadd',
args: [{ type: 'text' }, { type: 'int' }, { type: 'timestamp' }],
returns: 'timestamp',
implementation: (unit: string, amount: number, date: Date) => {
const result = new Date(date);
switch (unit) {
case 'd':
case 'day':
result.setDate(result.getDate() + amount);
break;
case 'h':
case 'hour':
result.setHours(result.getHours() + amount);
break;
case 'm':
case 'minute':
result.setMinutes(result.getMinutes() + amount);
break;
default:
throw new Error(`Unsupported date unit: ${unit}`);
}
return result;
},
}); // Mock QuestDB HTTP client
// Mock fetch using Bun's built-in mock
(global as any).fetch = mock(() => {});
// Mock the logger module to avoid Pino configuration conflicts
mock.module('@stock-bot/logger', () => ({
Logger: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
child: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
})),
})),
getLogger: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
child: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
})),
})),
}));
// Mock Pino and its transports to avoid configuration conflicts
mock.module('pino', () => ({
default: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
child: mock(() => ({
info: mock(() => {}),
warn: mock(() => {}),
error: mock(() => {}),
debug: mock(() => {}),
fatal: mock(() => {}),
trace: mock(() => {}),
})),
})),
}));
mock.module('pino-pretty', () => ({
default: mock(() => ({})),
}));
mock.module('pino-loki', () => ({
default: mock(() => ({})),
}));
});
beforeEach(() => {
// Reset database state
if (pgMem) {
try {
pgMem.public.none('DROP TABLE IF EXISTS ohlcv CASCADE');
pgMem.public.none('DROP TABLE IF EXISTS trades CASCADE');
pgMem.public.none('DROP TABLE IF EXISTS quotes CASCADE');
pgMem.public.none('DROP TABLE IF EXISTS indicators CASCADE');
pgMem.public.none('DROP TABLE IF EXISTS performance CASCADE');
pgMem.public.none('DROP TABLE IF EXISTS risk_metrics CASCADE');
} catch (error) {
// Tables might not exist, ignore errors
}
} // Reset fetch mock
if ((global as any).fetch) {
((global as any).fetch as any).mockClear?.();
}
});
/**
* QuestDB-specific test utilities
*/
export const questdbTestHelpers = {
/**
* Get mock PostgreSQL adapter
*/
getMockPgAdapter: () => pgMem?.adapters?.createPg?.(),
/**
* Execute SQL in mock database
*/
executeMockSQL: (sql: string, params?: any[]) => {
return pgMem?.public?.query(sql, params);
},
/**
* Mock successful QuestDB HTTP response
*/ mockQuestDBHttpSuccess: (data: any) => {
((global as any).fetch as any).mockResolvedValue?.({
ok: true,
status: 200,
json: async () => data,
text: async () => JSON.stringify(data),
});
},
/**
* Mock QuestDB HTTP error
*/ mockQuestDBHttpError: (status: number, message: string) => {
((global as any).fetch as any).mockResolvedValue?.({
ok: false,
status,
json: async () => ({ error: message }),
text: async () => message,
});
},
/**
* Mock InfluxDB line protocol response
*/ mockInfluxDBSuccess: () => {
((global as any).fetch as any).mockResolvedValue?.({
ok: true,
status: 204,
text: async () => '',
});
},
/**
* Create test OHLCV table
*/
createTestOHLCVTable: () => {
const sql = `
CREATE TABLE ohlcv (
symbol VARCHAR(10),
timestamp TIMESTAMP,
open DECIMAL(10,2),
high DECIMAL(10,2),
low DECIMAL(10,2),
close DECIMAL(10,2),
volume BIGINT,
source VARCHAR(50)
)
`;
return pgMem?.public?.none(sql);
},
/**
* Insert test OHLCV data
*/
insertTestOHLCVData: (data: any[]) => {
const sql = `
INSERT INTO ohlcv (symbol, timestamp, open, high, low, close, volume, source)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
`;
return Promise.all(
data.map(row =>
pgMem?.public?.none(sql, [
row.symbol,
row.timestamp,
row.open,
row.high,
row.low,
row.close,
row.volume,
row.source || 'test',
])
)
);
},
/**
* Generate InfluxDB line protocol test data
*/
generateInfluxDBLines: (count: number = 5) => {
const lines: string[] = [];
const baseTime = Date.now() * 1000000; // Convert to nanoseconds
for (let i = 0; i < count; i++) {
const time = baseTime + i * 60000000000; // 1 minute intervals
const price = 150 + Math.random() * 10;
lines.push(
`ohlcv,symbol=TEST open=${price},high=${price + 1},low=${price - 1},close=${price + 0.5},volume=1000i ${time}`
);
}
return lines;
},
/**
* Validate QuestDB query syntax
*/
validateQuestDBQuery: (query: string): boolean => {
// Basic validation for QuestDB-specific syntax
const questdbKeywords = ['SAMPLE BY', 'LATEST BY', 'ASOF JOIN', 'SPLICE JOIN', 'LT JOIN'];
// Check for valid SQL structure
const hasSelect = /SELECT\s+/i.test(query);
const hasFrom = /FROM\s+/i.test(query);
return hasSelect && hasFrom;
},
/**
* Mock connection pool
*/ createMockPool: () => {
const mockQuery = () => Promise.resolve({ rows: [], rowCount: 0 });
const mockRelease = () => {};
const mockConnect = () =>
Promise.resolve({
query: mockQuery,
release: mockRelease,
});
const mockEnd = () => Promise.resolve(undefined);
return {
connect: mockConnect,
end: mockEnd,
totalCount: 0,
idleCount: 0,
waitingCount: 0,
};
},
};

View file

@ -1,79 +1,79 @@
/**
* @stock-bot/shutdown - Shutdown management library
*
* Main exports for the shutdown library
*/
// Core shutdown classes and types
export { Shutdown } from './shutdown';
export type { ShutdownCallback, ShutdownOptions, ShutdownResult } from './types';
import { Shutdown } from './shutdown';
import type { ShutdownResult } from './types';
// Global singleton instance
let globalInstance: Shutdown | null = null;
/**
* Get the global shutdown instance (creates one if it doesn't exist)
*/
function getGlobalInstance(): Shutdown {
if (!globalInstance) {
globalInstance = Shutdown.getInstance();
}
return globalInstance;
}
/**
* Convenience functions for global shutdown management
*/
/**
* Register a cleanup callback that will be executed during shutdown
*/
export function onShutdown(callback: () => Promise<void> | void): void {
getGlobalInstance().onShutdown(callback);
}
/**
* Set the shutdown timeout in milliseconds
*/
export function setShutdownTimeout(timeout: number): void {
getGlobalInstance().setTimeout(timeout);
}
/**
* Check if shutdown is currently in progress
*/
export function isShuttingDown(): boolean {
return globalInstance?.isShutdownInProgress() || false;
}
/**
* Get the number of registered shutdown callbacks
*/
export function getShutdownCallbackCount(): number {
return globalInstance?.getCallbackCount() || 0;
}
/**
* Manually initiate graceful shutdown
*/
export function initiateShutdown(signal?: string): Promise<ShutdownResult> {
return getGlobalInstance().shutdown(signal);
}
/**
* Manually initiate graceful shutdown and exit the process
*/
export function shutdownAndExit(signal?: string, exitCode = 0): Promise<never> {
return getGlobalInstance().shutdownAndExit(signal, exitCode);
}
/**
* Reset the global instance (mainly for testing)
*/
export function resetShutdown(): void {
globalInstance = null;
Shutdown.reset();
}
import { Shutdown } from './shutdown';
import type { ShutdownResult } from './types';
/**
* @stock-bot/shutdown - Shutdown management library
*
* Main exports for the shutdown library
*/
// Core shutdown classes and types
export { Shutdown } from './shutdown';
export type { ShutdownCallback, ShutdownOptions, ShutdownResult } from './types';
// Global singleton instance
let globalInstance: Shutdown | null = null;
/**
* Get the global shutdown instance (creates one if it doesn't exist)
*/
function getGlobalInstance(): Shutdown {
if (!globalInstance) {
globalInstance = Shutdown.getInstance();
}
return globalInstance;
}
/**
* Convenience functions for global shutdown management
*/
/**
* Register a cleanup callback that will be executed during shutdown
*/
export function onShutdown(callback: () => Promise<void> | void): void {
getGlobalInstance().onShutdown(callback);
}
/**
* Set the shutdown timeout in milliseconds
*/
export function setShutdownTimeout(timeout: number): void {
getGlobalInstance().setTimeout(timeout);
}
/**
* Check if shutdown is currently in progress
*/
export function isShuttingDown(): boolean {
return globalInstance?.isShutdownInProgress() || false;
}
/**
* Get the number of registered shutdown callbacks
*/
export function getShutdownCallbackCount(): number {
return globalInstance?.getCallbackCount() || 0;
}
/**
* Manually initiate graceful shutdown
*/
export function initiateShutdown(signal?: string): Promise<ShutdownResult> {
return getGlobalInstance().shutdown(signal);
}
/**
* Manually initiate graceful shutdown and exit the process
*/
export function shutdownAndExit(signal?: string, exitCode = 0): Promise<never> {
return getGlobalInstance().shutdownAndExit(signal, exitCode);
}
/**
* Reset the global instance (mainly for testing)
*/
export function resetShutdown(): void {
globalInstance = null;
Shutdown.reset();
}

View file

@ -1,198 +1,197 @@
/**
* Shutdown management for Node.js applications
*
* Features:
* - Automatic signal handling (SIGTERM, SIGINT, etc.)
* - Configurable shutdown timeout
* - Multiple cleanup callbacks with error handling
* - Platform-specific signal support (Windows/Unix)
*/
import type { ShutdownCallback, ShutdownOptions, ShutdownResult } from './types';
export class Shutdown {
private static instance: Shutdown | null = null;
private isShuttingDown = false;
private shutdownTimeout = 30000; // 30 seconds default
private callbacks: ShutdownCallback[] = [];
private signalHandlersRegistered = false;
constructor(options: ShutdownOptions = {}) {
this.shutdownTimeout = options.timeout || 30000;
if (options.autoRegister !== false) {
this.setupSignalHandlers();
}
}
/**
* Get or create singleton instance
*/
static getInstance(options?: ShutdownOptions): Shutdown {
if (!Shutdown.instance) {
Shutdown.instance = new Shutdown(options);
}
return Shutdown.instance;
}
/**
* Reset singleton instance (mainly for testing)
*/
static reset(): void {
Shutdown.instance = null;
}
/**
* Register a cleanup callback
*/
onShutdown(callback: ShutdownCallback): void {
if (this.isShuttingDown) {
return;
}
this.callbacks.push(callback);
}
/**
* Set shutdown timeout in milliseconds
*/
setTimeout(timeout: number): void {
if (timeout <= 0) {
throw new Error('Shutdown timeout must be positive');
}
this.shutdownTimeout = timeout;
}
/**
* Get current shutdown state
*/
isShutdownInProgress(): boolean {
return this.isShuttingDown;
}
/**
* Get number of registered callbacks
*/
getCallbackCount(): number {
return this.callbacks.length;
}
/**
* Initiate graceful shutdown
*/
async shutdown(signal?: string): Promise<ShutdownResult> {
if (this.isShuttingDown) {
return {
success: false,
callbacksExecuted: 0,
callbacksFailed: 0,
duration: 0,
error: 'Shutdown already in progress'
};
}
this.isShuttingDown = true;
const startTime = Date.now();
const shutdownPromise = this.executeCallbacks();
const timeoutPromise = new Promise<never>((_, reject) => {
setTimeout(() => reject(new Error('Shutdown timeout')), this.shutdownTimeout);
});
let result: ShutdownResult;
try {
const callbackResult = await Promise.race([shutdownPromise, timeoutPromise]);
const duration = Date.now() - startTime;
result = {
success: true,
callbacksExecuted: callbackResult.executed,
callbacksFailed: callbackResult.failed,
duration,
error: callbackResult.failed > 0 ? `${callbackResult.failed} callbacks failed` : undefined
};
} catch (error) {
const duration = Date.now() - startTime;
const errorMessage = error instanceof Error ? error.message : String(error);
result = {
success: false,
callbacksExecuted: 0,
callbacksFailed: 0,
duration,
error: errorMessage
};
}
// Don't call process.exit here - let the caller decide
return result;
}
/**
* Initiate shutdown and exit process
*/
async shutdownAndExit(signal?: string, exitCode = 0): Promise<never> {
const result = await this.shutdown(signal);
const finalExitCode = result.success ? exitCode : 1;
process.exit(finalExitCode);
}
/**
* Execute all registered callbacks
*/
private async executeCallbacks(): Promise<{ executed: number; failed: number }> {
if (this.callbacks.length === 0) {
return { executed: 0, failed: 0 };
}
const results = await Promise.allSettled(
this.callbacks.map(async (callback) => {
await callback();
})
);
const failed = results.filter(result => result.status === 'rejected').length;
const executed = results.length;
return { executed, failed };
}
/**
* Setup signal handlers for graceful shutdown
*/
private setupSignalHandlers(): void {
if (this.signalHandlersRegistered) {
return;
}
// Platform-specific signals
const signals: NodeJS.Signals[] = process.platform === 'win32'
? ['SIGINT', 'SIGTERM']
: ['SIGTERM', 'SIGINT', 'SIGUSR2'];
signals.forEach(signal => {
process.on(signal, () => {
this.shutdownAndExit(signal).catch(() => {
process.exit(1);
});
});
});
// Handle uncaught exceptions
process.on('uncaughtException', () => {
this.shutdownAndExit('uncaughtException', 1).catch(() => {
process.exit(1);
});
});
// Handle unhandled promise rejections
process.on('unhandledRejection', () => {
this.shutdownAndExit('unhandledRejection', 1).catch(() => {
process.exit(1);
});
});
this.signalHandlersRegistered = true;
}
}
/**
* Shutdown management for Node.js applications
*
* Features:
* - Automatic signal handling (SIGTERM, SIGINT, etc.)
* - Configurable shutdown timeout
* - Multiple cleanup callbacks with error handling
* - Platform-specific signal support (Windows/Unix)
*/
import type { ShutdownCallback, ShutdownOptions, ShutdownResult } from './types';
export class Shutdown {
private static instance: Shutdown | null = null;
private isShuttingDown = false;
private shutdownTimeout = 30000; // 30 seconds default
private callbacks: ShutdownCallback[] = [];
private signalHandlersRegistered = false;
constructor(options: ShutdownOptions = {}) {
this.shutdownTimeout = options.timeout || 30000;
if (options.autoRegister !== false) {
this.setupSignalHandlers();
}
}
/**
* Get or create singleton instance
*/
static getInstance(options?: ShutdownOptions): Shutdown {
if (!Shutdown.instance) {
Shutdown.instance = new Shutdown(options);
}
return Shutdown.instance;
}
/**
* Reset singleton instance (mainly for testing)
*/
static reset(): void {
Shutdown.instance = null;
}
/**
* Register a cleanup callback
*/
onShutdown(callback: ShutdownCallback): void {
if (this.isShuttingDown) {
return;
}
this.callbacks.push(callback);
}
/**
* Set shutdown timeout in milliseconds
*/
setTimeout(timeout: number): void {
if (timeout <= 0) {
throw new Error('Shutdown timeout must be positive');
}
this.shutdownTimeout = timeout;
}
/**
* Get current shutdown state
*/
isShutdownInProgress(): boolean {
return this.isShuttingDown;
}
/**
* Get number of registered callbacks
*/
getCallbackCount(): number {
return this.callbacks.length;
}
/**
* Initiate graceful shutdown
*/
async shutdown(signal?: string): Promise<ShutdownResult> {
if (this.isShuttingDown) {
return {
success: false,
callbacksExecuted: 0,
callbacksFailed: 0,
duration: 0,
error: 'Shutdown already in progress',
};
}
this.isShuttingDown = true;
const startTime = Date.now();
const shutdownPromise = this.executeCallbacks();
const timeoutPromise = new Promise<never>((_, reject) => {
setTimeout(() => reject(new Error('Shutdown timeout')), this.shutdownTimeout);
});
let result: ShutdownResult;
try {
const callbackResult = await Promise.race([shutdownPromise, timeoutPromise]);
const duration = Date.now() - startTime;
result = {
success: true,
callbacksExecuted: callbackResult.executed,
callbacksFailed: callbackResult.failed,
duration,
error: callbackResult.failed > 0 ? `${callbackResult.failed} callbacks failed` : undefined,
};
} catch (error) {
const duration = Date.now() - startTime;
const errorMessage = error instanceof Error ? error.message : String(error);
result = {
success: false,
callbacksExecuted: 0,
callbacksFailed: 0,
duration,
error: errorMessage,
};
}
// Don't call process.exit here - let the caller decide
return result;
}
/**
* Initiate shutdown and exit process
*/
async shutdownAndExit(signal?: string, exitCode = 0): Promise<never> {
const result = await this.shutdown(signal);
const finalExitCode = result.success ? exitCode : 1;
process.exit(finalExitCode);
}
/**
* Execute all registered callbacks
*/
private async executeCallbacks(): Promise<{ executed: number; failed: number }> {
if (this.callbacks.length === 0) {
return { executed: 0, failed: 0 };
}
const results = await Promise.allSettled(
this.callbacks.map(async callback => {
await callback();
})
);
const failed = results.filter(result => result.status === 'rejected').length;
const executed = results.length;
return { executed, failed };
}
/**
* Setup signal handlers for graceful shutdown
*/
private setupSignalHandlers(): void {
if (this.signalHandlersRegistered) {
return;
}
// Platform-specific signals
const signals: NodeJS.Signals[] =
process.platform === 'win32' ? ['SIGINT', 'SIGTERM'] : ['SIGTERM', 'SIGINT', 'SIGUSR2'];
signals.forEach(signal => {
process.on(signal, () => {
this.shutdownAndExit(signal).catch(() => {
process.exit(1);
});
});
});
// Handle uncaught exceptions
process.on('uncaughtException', () => {
this.shutdownAndExit('uncaughtException', 1).catch(() => {
process.exit(1);
});
});
// Handle unhandled promise rejections
process.on('unhandledRejection', () => {
this.shutdownAndExit('unhandledRejection', 1).catch(() => {
process.exit(1);
});
});
this.signalHandlersRegistered = true;
}
}

View file

@ -1,34 +1,34 @@
/**
* Types for shutdown functionality
*/
/**
* Callback function for shutdown cleanup
*/
export type ShutdownCallback = () => Promise<void> | void;
/**
* Options for configuring shutdown behavior
*/
export interface ShutdownOptions {
/** Timeout in milliseconds before forcing shutdown (default: 30000) */
timeout?: number;
/** Whether to automatically register signal handlers (default: true) */
autoRegister?: boolean;
}
/**
* Shutdown result information
*/
export interface ShutdownResult {
/** Whether shutdown completed successfully */
success: boolean;
/** Number of callbacks executed */
callbacksExecuted: number;
/** Number of callbacks that failed */
callbacksFailed: number;
/** Time taken for shutdown in milliseconds */
duration: number;
/** Error message if shutdown failed */
error?: string;
}
/**
* Types for shutdown functionality
*/
/**
* Callback function for shutdown cleanup
*/
export type ShutdownCallback = () => Promise<void> | void;
/**
* Options for configuring shutdown behavior
*/
export interface ShutdownOptions {
/** Timeout in milliseconds before forcing shutdown (default: 30000) */
timeout?: number;
/** Whether to automatically register signal handlers (default: true) */
autoRegister?: boolean;
}
/**
* Shutdown result information
*/
export interface ShutdownResult {
/** Whether shutdown completed successfully */
success: boolean;
/** Number of callbacks executed */
callbacksExecuted: number;
/** Number of callbacks that failed */
callbacksFailed: number;
/** Time taken for shutdown in milliseconds */
duration: number;
/** Error message if shutdown failed */
error?: string;
}

View file

@ -1,370 +1,368 @@
import { EventEmitter } from 'eventemitter3';
import { getLogger } from '@stock-bot/logger';
import { EventBus } from '@stock-bot/event-bus';
import { DataFrame } from '@stock-bot/data-frame';
// Core types
export interface MarketData {
symbol: string;
timestamp: number;
open: number;
high: number;
low: number;
close: number;
volume: number;
[key: string]: any;
}
export interface TradingSignal {
type: 'BUY' | 'SELL' | 'HOLD';
symbol: string;
timestamp: number;
price: number;
quantity: number;
confidence: number;
reason: string;
metadata?: Record<string, any>;
}
export interface StrategyContext {
symbol: string;
timeframe: string;
data: DataFrame;
indicators: Record<string, any>;
position?: Position;
portfolio: PortfolioSummary;
timestamp: number;
}
export interface Position {
symbol: string;
quantity: number;
averagePrice: number;
currentPrice: number;
unrealizedPnL: number;
side: 'LONG' | 'SHORT';
}
export interface PortfolioSummary {
totalValue: number;
cash: number;
positions: Position[];
totalPnL: number;
dayPnL: number;
}
export interface StrategyConfig {
id: string;
name: string;
description?: string;
symbols: string[];
timeframes: string[];
parameters: Record<string, any>;
riskLimits: RiskLimits;
enabled: boolean;
}
export interface RiskLimits {
maxPositionSize: number;
maxDailyLoss: number;
maxDrawdown: number;
stopLoss?: number;
takeProfit?: number;
}
// Abstract base strategy class
export abstract class BaseStrategy extends EventEmitter {
protected logger;
protected eventBus: EventBus;
protected config: StrategyConfig;
protected isActive: boolean = false;
constructor(config: StrategyConfig, eventBus: EventBus) {
super();
this.config = config;
this.eventBus = eventBus;
this.logger = getLogger(`strategy:${config.id}`);
}
// Abstract methods that must be implemented by concrete strategies
abstract initialize(): Promise<void>;
abstract onMarketData(context: StrategyContext): Promise<TradingSignal[]>;
abstract onSignal(signal: TradingSignal): Promise<void>;
abstract cleanup(): Promise<void>;
// Optional lifecycle methods
onStart?(): Promise<void>;
onStop?(): Promise<void>;
onError?(error: Error): Promise<void>;
// Control methods
async start(): Promise<void> {
if (this.isActive) {
this.logger.warn('Strategy already active');
return;
}
try {
await this.initialize();
if (this.onStart) {
await this.onStart();
}
this.isActive = true;
this.logger.info('Strategy started', { strategyId: this.config.id });
this.emit('started');
} catch (error) {
this.logger.error('Failed to start strategy', { error, strategyId: this.config.id });
throw error;
}
}
async stop(): Promise<void> {
if (!this.isActive) {
this.logger.warn('Strategy not active');
return;
}
try {
if (this.onStop) {
await this.onStop();
}
await this.cleanup();
this.isActive = false;
this.logger.info('Strategy stopped', { strategyId: this.config.id });
this.emit('stopped');
} catch (error) {
this.logger.error('Failed to stop strategy', { error, strategyId: this.config.id });
throw error;
}
}
// Utility methods
protected async emitSignal(signal: TradingSignal): Promise<void> {
await this.eventBus.publish(this.config.id, signal);
this.emit('signal', signal);
this.logger.info('Signal generated', {
signal: signal.type,
symbol: signal.symbol,
confidence: signal.confidence
});
}
protected checkRiskLimits(signal: TradingSignal, context: StrategyContext): boolean {
const limits = this.config.riskLimits;
// Check position size limit
if (signal.quantity > limits.maxPositionSize) {
this.logger.warn('Signal exceeds max position size', {
requested: signal.quantity,
limit: limits.maxPositionSize
});
return false;
}
// Check daily loss limit
if (context.portfolio.dayPnL <= -limits.maxDailyLoss) {
this.logger.warn('Daily loss limit reached', {
dayPnL: context.portfolio.dayPnL,
limit: -limits.maxDailyLoss
});
return false;
}
return true;
}
// Getters
get id(): string {
return this.config.id;
}
get name(): string {
return this.config.name;
}
get active(): boolean {
return this.isActive;
}
get configuration(): StrategyConfig {
return { ...this.config };
}
}
// Strategy execution engine
export class StrategyEngine extends EventEmitter {
private strategies: Map<string, BaseStrategy> = new Map();
private logger;
private eventBus: EventBus;
private isRunning: boolean = false;
constructor(eventBus: EventBus) {
super();
this.eventBus = eventBus;
this.logger = getLogger('strategy-engine');
}
async initialize(): Promise<void> {
// Subscribe to market data events
await this.eventBus.subscribe('market.data', this.handleMarketData.bind(this));
await this.eventBus.subscribe('order.update', this.handleOrderUpdate.bind(this));
await this.eventBus.subscribe('portfolio.update', this.handlePortfolioUpdate.bind(this));
this.logger.info('Strategy engine initialized');
}
async registerStrategy(strategy: BaseStrategy): Promise<void> {
if (this.strategies.has(strategy.id)) {
throw new Error(`Strategy ${strategy.id} already registered`);
}
this.strategies.set(strategy.id, strategy);
// Forward strategy events
strategy.on('signal', (signal) => this.emit('signal', signal));
strategy.on('error', (error) => this.emit('error', error));
this.logger.info('Strategy registered', { strategyId: strategy.id });
}
async unregisterStrategy(strategyId: string): Promise<void> {
const strategy = this.strategies.get(strategyId);
if (!strategy) {
throw new Error(`Strategy ${strategyId} not found`);
}
if (strategy.active) {
await strategy.stop();
}
strategy.removeAllListeners();
this.strategies.delete(strategyId);
this.logger.info('Strategy unregistered', { strategyId });
}
async startStrategy(strategyId: string): Promise<void> {
const strategy = this.strategies.get(strategyId);
if (!strategy) {
throw new Error(`Strategy ${strategyId} not found`);
}
await strategy.start();
}
async stopStrategy(strategyId: string): Promise<void> {
const strategy = this.strategies.get(strategyId);
if (!strategy) {
throw new Error(`Strategy ${strategyId} not found`);
}
await strategy.stop();
}
async startAll(): Promise<void> {
if (this.isRunning) {
this.logger.warn('Engine already running');
return;
}
const startPromises = Array.from(this.strategies.values())
.filter(strategy => strategy.configuration.enabled)
.map(strategy => strategy.start());
await Promise.all(startPromises);
this.isRunning = true;
this.logger.info('All strategies started');
this.emit('started');
}
async stopAll(): Promise<void> {
if (!this.isRunning) {
this.logger.warn('Engine not running');
return;
}
const stopPromises = Array.from(this.strategies.values())
.filter(strategy => strategy.active)
.map(strategy => strategy.stop());
await Promise.all(stopPromises);
this.isRunning = false;
this.logger.info('All strategies stopped');
this.emit('stopped');
}
private async handleMarketData(message: any): Promise<void> {
const { symbol, ...data } = message.data;
// Find strategies that trade this symbol
const relevantStrategies = Array.from(this.strategies.values())
.filter(strategy =>
strategy.active &&
strategy.configuration.symbols.includes(symbol)
);
for (const strategy of relevantStrategies) {
try {
// Create context for this strategy
const context: StrategyContext = {
symbol,
timeframe: '1m', // TODO: Get from strategy config
data: new DataFrame([data]), // TODO: Use historical data
indicators: {},
portfolio: {
totalValue: 100000, // TODO: Get real portfolio data
cash: 50000,
positions: [],
totalPnL: 0,
dayPnL: 0
},
timestamp: data.timestamp
};
const signals = await strategy.onMarketData(context);
for (const signal of signals) {
await strategy.onSignal(signal);
}
} catch (error) {
this.logger.error('Error processing market data for strategy', {
error,
strategyId: strategy.id,
symbol
});
}
}
}
private async handleOrderUpdate(message: any): Promise<void> {
// Handle order updates - notify relevant strategies
this.logger.debug('Order update received', { data: message.data });
}
private async handlePortfolioUpdate(message: any): Promise<void> {
// Handle portfolio updates - notify relevant strategies
this.logger.debug('Portfolio update received', { data: message.data });
}
getStrategy(strategyId: string): BaseStrategy | undefined {
return this.strategies.get(strategyId);
}
getStrategies(): BaseStrategy[] {
return Array.from(this.strategies.values());
}
getActiveStrategies(): BaseStrategy[] {
return this.getStrategies().filter(strategy => strategy.active);
}
async shutdown(): Promise<void> {
await this.stopAll();
this.strategies.clear();
this.removeAllListeners();
this.logger.info('Strategy engine shutdown');
}
}
import { EventEmitter } from 'eventemitter3';
import { DataFrame } from '@stock-bot/data-frame';
import { EventBus } from '@stock-bot/event-bus';
import { getLogger } from '@stock-bot/logger';
// Core types
export interface MarketData {
symbol: string;
timestamp: number;
open: number;
high: number;
low: number;
close: number;
volume: number;
[key: string]: any;
}
export interface TradingSignal {
type: 'BUY' | 'SELL' | 'HOLD';
symbol: string;
timestamp: number;
price: number;
quantity: number;
confidence: number;
reason: string;
metadata?: Record<string, any>;
}
export interface StrategyContext {
symbol: string;
timeframe: string;
data: DataFrame;
indicators: Record<string, any>;
position?: Position;
portfolio: PortfolioSummary;
timestamp: number;
}
export interface Position {
symbol: string;
quantity: number;
averagePrice: number;
currentPrice: number;
unrealizedPnL: number;
side: 'LONG' | 'SHORT';
}
export interface PortfolioSummary {
totalValue: number;
cash: number;
positions: Position[];
totalPnL: number;
dayPnL: number;
}
export interface StrategyConfig {
id: string;
name: string;
description?: string;
symbols: string[];
timeframes: string[];
parameters: Record<string, any>;
riskLimits: RiskLimits;
enabled: boolean;
}
export interface RiskLimits {
maxPositionSize: number;
maxDailyLoss: number;
maxDrawdown: number;
stopLoss?: number;
takeProfit?: number;
}
// Abstract base strategy class
export abstract class BaseStrategy extends EventEmitter {
protected logger;
protected eventBus: EventBus;
protected config: StrategyConfig;
protected isActive: boolean = false;
constructor(config: StrategyConfig, eventBus: EventBus) {
super();
this.config = config;
this.eventBus = eventBus;
this.logger = getLogger(`strategy:${config.id}`);
}
// Abstract methods that must be implemented by concrete strategies
abstract initialize(): Promise<void>;
abstract onMarketData(context: StrategyContext): Promise<TradingSignal[]>;
abstract onSignal(signal: TradingSignal): Promise<void>;
abstract cleanup(): Promise<void>;
// Optional lifecycle methods
onStart?(): Promise<void>;
onStop?(): Promise<void>;
onError?(error: Error): Promise<void>;
// Control methods
async start(): Promise<void> {
if (this.isActive) {
this.logger.warn('Strategy already active');
return;
}
try {
await this.initialize();
if (this.onStart) {
await this.onStart();
}
this.isActive = true;
this.logger.info('Strategy started', { strategyId: this.config.id });
this.emit('started');
} catch (error) {
this.logger.error('Failed to start strategy', { error, strategyId: this.config.id });
throw error;
}
}
async stop(): Promise<void> {
if (!this.isActive) {
this.logger.warn('Strategy not active');
return;
}
try {
if (this.onStop) {
await this.onStop();
}
await this.cleanup();
this.isActive = false;
this.logger.info('Strategy stopped', { strategyId: this.config.id });
this.emit('stopped');
} catch (error) {
this.logger.error('Failed to stop strategy', { error, strategyId: this.config.id });
throw error;
}
}
// Utility methods
protected async emitSignal(signal: TradingSignal): Promise<void> {
await this.eventBus.publish(this.config.id, signal);
this.emit('signal', signal);
this.logger.info('Signal generated', {
signal: signal.type,
symbol: signal.symbol,
confidence: signal.confidence,
});
}
protected checkRiskLimits(signal: TradingSignal, context: StrategyContext): boolean {
const limits = this.config.riskLimits;
// Check position size limit
if (signal.quantity > limits.maxPositionSize) {
this.logger.warn('Signal exceeds max position size', {
requested: signal.quantity,
limit: limits.maxPositionSize,
});
return false;
}
// Check daily loss limit
if (context.portfolio.dayPnL <= -limits.maxDailyLoss) {
this.logger.warn('Daily loss limit reached', {
dayPnL: context.portfolio.dayPnL,
limit: -limits.maxDailyLoss,
});
return false;
}
return true;
}
// Getters
get id(): string {
return this.config.id;
}
get name(): string {
return this.config.name;
}
get active(): boolean {
return this.isActive;
}
get configuration(): StrategyConfig {
return { ...this.config };
}
}
// Strategy execution engine
export class StrategyEngine extends EventEmitter {
private strategies: Map<string, BaseStrategy> = new Map();
private logger;
private eventBus: EventBus;
private isRunning: boolean = false;
constructor(eventBus: EventBus) {
super();
this.eventBus = eventBus;
this.logger = getLogger('strategy-engine');
}
async initialize(): Promise<void> {
// Subscribe to market data events
await this.eventBus.subscribe('market.data', this.handleMarketData.bind(this));
await this.eventBus.subscribe('order.update', this.handleOrderUpdate.bind(this));
await this.eventBus.subscribe('portfolio.update', this.handlePortfolioUpdate.bind(this));
this.logger.info('Strategy engine initialized');
}
async registerStrategy(strategy: BaseStrategy): Promise<void> {
if (this.strategies.has(strategy.id)) {
throw new Error(`Strategy ${strategy.id} already registered`);
}
this.strategies.set(strategy.id, strategy);
// Forward strategy events
strategy.on('signal', signal => this.emit('signal', signal));
strategy.on('error', error => this.emit('error', error));
this.logger.info('Strategy registered', { strategyId: strategy.id });
}
async unregisterStrategy(strategyId: string): Promise<void> {
const strategy = this.strategies.get(strategyId);
if (!strategy) {
throw new Error(`Strategy ${strategyId} not found`);
}
if (strategy.active) {
await strategy.stop();
}
strategy.removeAllListeners();
this.strategies.delete(strategyId);
this.logger.info('Strategy unregistered', { strategyId });
}
async startStrategy(strategyId: string): Promise<void> {
const strategy = this.strategies.get(strategyId);
if (!strategy) {
throw new Error(`Strategy ${strategyId} not found`);
}
await strategy.start();
}
async stopStrategy(strategyId: string): Promise<void> {
const strategy = this.strategies.get(strategyId);
if (!strategy) {
throw new Error(`Strategy ${strategyId} not found`);
}
await strategy.stop();
}
async startAll(): Promise<void> {
if (this.isRunning) {
this.logger.warn('Engine already running');
return;
}
const startPromises = Array.from(this.strategies.values())
.filter(strategy => strategy.configuration.enabled)
.map(strategy => strategy.start());
await Promise.all(startPromises);
this.isRunning = true;
this.logger.info('All strategies started');
this.emit('started');
}
async stopAll(): Promise<void> {
if (!this.isRunning) {
this.logger.warn('Engine not running');
return;
}
const stopPromises = Array.from(this.strategies.values())
.filter(strategy => strategy.active)
.map(strategy => strategy.stop());
await Promise.all(stopPromises);
this.isRunning = false;
this.logger.info('All strategies stopped');
this.emit('stopped');
}
private async handleMarketData(message: any): Promise<void> {
const { symbol, ...data } = message.data;
// Find strategies that trade this symbol
const relevantStrategies = Array.from(this.strategies.values()).filter(
strategy => strategy.active && strategy.configuration.symbols.includes(symbol)
);
for (const strategy of relevantStrategies) {
try {
// Create context for this strategy
const context: StrategyContext = {
symbol,
timeframe: '1m', // TODO: Get from strategy config
data: new DataFrame([data]), // TODO: Use historical data
indicators: {},
portfolio: {
totalValue: 100000, // TODO: Get real portfolio data
cash: 50000,
positions: [],
totalPnL: 0,
dayPnL: 0,
},
timestamp: data.timestamp,
};
const signals = await strategy.onMarketData(context);
for (const signal of signals) {
await strategy.onSignal(signal);
}
} catch (error) {
this.logger.error('Error processing market data for strategy', {
error,
strategyId: strategy.id,
symbol,
});
}
}
}
private async handleOrderUpdate(message: any): Promise<void> {
// Handle order updates - notify relevant strategies
this.logger.debug('Order update received', { data: message.data });
}
private async handlePortfolioUpdate(message: any): Promise<void> {
// Handle portfolio updates - notify relevant strategies
this.logger.debug('Portfolio update received', { data: message.data });
}
getStrategy(strategyId: string): BaseStrategy | undefined {
return this.strategies.get(strategyId);
}
getStrategies(): BaseStrategy[] {
return Array.from(this.strategies.values());
}
getActiveStrategies(): BaseStrategy[] {
return this.getStrategies().filter(strategy => strategy.active);
}
async shutdown(): Promise<void> {
await this.stopAll();
this.strategies.clear();
this.removeAllListeners();
this.logger.info('Strategy engine shutdown');
}
}

View file

@ -1 +1 @@
// Export all types from the events module
// Export all types from the events module

View file

@ -1,391 +1,429 @@
/**
* Basic Financial Calculations
* Core mathematical functions for financial analysis
*/
/**
* Calculate percentage change between two values
*/
export function percentageChange(oldValue: number, newValue: number): number {
if (oldValue === 0) return 0;
return ((newValue - oldValue) / oldValue) * 100;
}
/**
* Calculate simple return
*/
export function simpleReturn(initialPrice: number, finalPrice: number): number {
if (initialPrice === 0) return 0;
return (finalPrice - initialPrice) / initialPrice;
}
/**
* Calculate logarithmic return
*/
export function logReturn(initialPrice: number, finalPrice: number): number {
if (initialPrice <= 0 || finalPrice <= 0) return 0;
return Math.log(finalPrice / initialPrice);
}
/**
* Calculate compound annual growth rate (CAGR)
*/
export function cagr(startValue: number, endValue: number, years: number): number {
if (years <= 0 || startValue <= 0 || endValue <= 0) return 0;
return Math.pow(endValue / startValue, 1 / years) - 1;
}
/**
* Calculate annualized return from periodic returns
*/
export function annualizeReturn(periodicReturn: number, periodsPerYear: number): number {
return Math.pow(1 + periodicReturn, periodsPerYear) - 1;
}
/**
* Calculate annualized volatility from periodic returns
*/
export function annualizeVolatility(periodicVolatility: number, periodsPerYear: number): number {
return periodicVolatility * Math.sqrt(periodsPerYear);
}
/**
* Calculate present value
*/
export function presentValue(futureValue: number, rate: number, periods: number): number {
return futureValue / Math.pow(1 + rate, periods);
}
/**
* Calculate future value
*/
export function futureValue(presentValue: number, rate: number, periods: number): number {
return presentValue * Math.pow(1 + rate, periods);
}
/**
* Calculate net present value of cash flows
*/
export function netPresentValue(cashFlows: number[], discountRate: number): number {
return cashFlows.reduce((npv, cashFlow, index) => {
return npv + cashFlow / Math.pow(1 + discountRate, index);
}, 0);
}
/**
* Calculate internal rate of return (IRR) using Newton-Raphson method
*/
export function internalRateOfReturn(cashFlows: number[], guess: number = 0.1, maxIterations: number = 100): number {
let rate = guess;
for (let i = 0; i < maxIterations; i++) {
let npv = 0;
let dnpv = 0;
for (let j = 0; j < cashFlows.length; j++) {
npv += cashFlows[j] / Math.pow(1 + rate, j);
dnpv += -j * cashFlows[j] / Math.pow(1 + rate, j + 1);
}
if (Math.abs(npv) < 1e-10) break;
if (Math.abs(dnpv) < 1e-10) break;
rate = rate - npv / dnpv;
}
return rate;
}
/**
* Calculate payback period
*/
export function paybackPeriod(initialInvestment: number, cashFlows: number[]): number {
let cumulativeCashFlow = 0;
for (let i = 0; i < cashFlows.length; i++) {
cumulativeCashFlow += cashFlows[i];
if (cumulativeCashFlow >= initialInvestment) {
return i + 1 - (cumulativeCashFlow - initialInvestment) / cashFlows[i];
}
}
return -1; // Never pays back
}
/**
* Calculate compound interest
*/
export function compoundInterest(
principal: number,
rate: number,
periods: number,
compoundingFrequency: number = 1
): number {
return principal * Math.pow(1 + rate / compoundingFrequency, compoundingFrequency * periods);
}
/**
* Calculate effective annual rate
*/
export function effectiveAnnualRate(nominalRate: number, compoundingFrequency: number): number {
return Math.pow(1 + nominalRate / compoundingFrequency, compoundingFrequency) - 1;
}
/**
* Calculate bond price given yield
*/
export function bondPrice(
faceValue: number,
couponRate: number,
yieldToMaturity: number,
periodsToMaturity: number,
paymentsPerYear: number = 2
): number {
const couponPayment = (faceValue * couponRate) / paymentsPerYear;
const discountRate = yieldToMaturity / paymentsPerYear;
let price = 0;
// Present value of coupon payments
for (let i = 1; i <= periodsToMaturity; i++) {
price += couponPayment / Math.pow(1 + discountRate, i);
}
// Present value of face value
price += faceValue / Math.pow(1 + discountRate, periodsToMaturity);
return price;
}
/**
* Calculate bond yield given price (Newton-Raphson approximation)
*/
export function bondYield(
price: number,
faceValue: number,
couponRate: number,
periodsToMaturity: number,
paymentsPerYear: number = 2,
guess: number = 0.05
): number {
let yield_ = guess;
const maxIterations = 100;
const tolerance = 1e-8;
for (let i = 0; i < maxIterations; i++) {
const calculatedPrice = bondPrice(faceValue, couponRate, yield_, periodsToMaturity, paymentsPerYear);
const diff = calculatedPrice - price;
if (Math.abs(diff) < tolerance) break;
// Numerical derivative
const delta = 0.0001;
const priceUp = bondPrice(faceValue, couponRate, yield_ + delta, periodsToMaturity, paymentsPerYear);
const derivative = (priceUp - calculatedPrice) / delta;
if (Math.abs(derivative) < tolerance) break;
yield_ = yield_ - diff / derivative;
}
return yield_;
}
/**
* Calculate duration (Macaulay duration)
*/
export function macaulayDuration(
faceValue: number,
couponRate: number,
yieldToMaturity: number,
periodsToMaturity: number,
paymentsPerYear: number = 2
): number {
const couponPayment = (faceValue * couponRate) / paymentsPerYear;
const discountRate = yieldToMaturity / paymentsPerYear;
const bondPriceValue = bondPrice(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear);
let weightedTime = 0;
// Weighted time of coupon payments
for (let i = 1; i <= periodsToMaturity; i++) {
const presentValue = couponPayment / Math.pow(1 + discountRate, i);
weightedTime += (i * presentValue) / bondPriceValue;
}
// Weighted time of face value
const faceValuePV = faceValue / Math.pow(1 + discountRate, periodsToMaturity);
weightedTime += (periodsToMaturity * faceValuePV) / bondPriceValue;
return weightedTime / paymentsPerYear; // Convert to years
}
/**
* Calculate modified duration
*/
export function modifiedDuration(
faceValue: number,
couponRate: number,
yieldToMaturity: number,
periodsToMaturity: number,
paymentsPerYear: number = 2
): number {
const macDuration = macaulayDuration(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear);
return macDuration / (1 + yieldToMaturity / paymentsPerYear);
}
/**
* Calculate bond convexity
*/
export function bondConvexity(
faceValue: number,
couponRate: number,
yieldToMaturity: number,
periodsToMaturity: number,
paymentsPerYear: number = 2
): number {
const couponPayment = (faceValue * couponRate) / paymentsPerYear;
const discountRate = yieldToMaturity / paymentsPerYear;
let convexity = 0;
const bondPriceValue = bondPrice(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear);
for (let i = 1; i <= periodsToMaturity; i++) {
const presentValue = couponPayment / Math.pow(1 + discountRate, i);
convexity += (i * (i + 1) * presentValue) / Math.pow(1 + discountRate, 2);
}
const faceValuePV = faceValue / Math.pow(1 + discountRate, periodsToMaturity);
convexity += (periodsToMaturity * (periodsToMaturity + 1) * faceValuePV) / Math.pow(1 + discountRate, 2);
return convexity / (bondPriceValue * paymentsPerYear * paymentsPerYear);
}
/**
* Calculate dollar duration
*/
export function dollarDuration(
faceValue: number,
couponRate: number,
yieldToMaturity: number,
periodsToMaturity: number,
paymentsPerYear: number = 2,
basisPointChange: number = 0.01 // 1 basis point = 0.01%
): number {
const modifiedDur = modifiedDuration(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear);
const bondPriceValue = bondPrice(faceValue, couponRate, yieldToMaturity, periodsToMaturity, paymentsPerYear);
return modifiedDur * bondPriceValue * basisPointChange;
}
/**
* Calculate accrued interest
*/
export function accruedInterest(
faceValue: number,
couponRate: number,
daysSinceLastCoupon: number,
daysInCouponPeriod: number
): number {
return (faceValue * couponRate) * (daysSinceLastCoupon / daysInCouponPeriod);
}
/**
* Calculate clean price
*/
export function cleanPrice(dirtyPrice: number, accruedInterestValue: number): number {
return dirtyPrice - accruedInterestValue;
}
/**
* Calculate dirty price
*/
export function dirtyPrice(cleanPriceValue: number, accruedInterestValue: number): number {
return cleanPriceValue + accruedInterestValue;
}
/**
* Calculate dividend discount model (DDM)
*/
export function dividendDiscountModel(
currentDividend: number,
growthRate: number,
discountRate: number
): number {
if (discountRate <= growthRate) return NaN; // Indeterminate
return currentDividend * (1 + growthRate) / (discountRate - growthRate);
}
/**
* Calculate weighted average cost of capital (WACC)
*/
export function weightedAverageCostOfCapital(
costOfEquity: number,
costOfDebt: number,
equityWeight: number,
debtWeight: number,
taxRate: number
): number {
return (equityWeight * costOfEquity) + (debtWeight * costOfDebt * (1 - taxRate));
}
/**
* Calculate capital asset pricing model (CAPM)
*/
export function capitalAssetPricingModel(
riskFreeRate: number,
beta: number,
marketRiskPremium: number
): number {
return riskFreeRate + beta * marketRiskPremium;
}
/**
* Calculate hurdle rate
*/
export function hurdleRate(
costOfCapital: number,
riskPremium: number
): number {
return costOfCapital + riskPremium;
}
/**
* Calculate degree of operating leverage (DOL)
*/
export function degreeOfOperatingLeverage(
contributionMargin: number,
operatingIncome: number
): number {
return contributionMargin / operatingIncome;
}
/**
* Calculate degree of financial leverage (DFL)
*/
export function degreeOfFinancialLeverage(
ebit: number,
earningsBeforeTax: number
): number {
return ebit / earningsBeforeTax;
}
/**
* Calculate degree of total leverage (DTL)
*/
export function degreeOfTotalLeverage(
dol: number,
dfl: number
): number {
return dol * dfl;
}
/**
* Calculate economic value added (EVA)
*/
export function economicValueAdded(
netOperatingProfitAfterTax: number,
capitalInvested: number,
wacc: number
): number {
return netOperatingProfitAfterTax - (capitalInvested * wacc);
}
/**
* Basic Financial Calculations
* Core mathematical functions for financial analysis
*/
/**
* Calculate percentage change between two values
*/
export function percentageChange(oldValue: number, newValue: number): number {
if (oldValue === 0) return 0;
return ((newValue - oldValue) / oldValue) * 100;
}
/**
* Calculate simple return
*/
export function simpleReturn(initialPrice: number, finalPrice: number): number {
if (initialPrice === 0) return 0;
return (finalPrice - initialPrice) / initialPrice;
}
/**
* Calculate logarithmic return
*/
export function logReturn(initialPrice: number, finalPrice: number): number {
if (initialPrice <= 0 || finalPrice <= 0) return 0;
return Math.log(finalPrice / initialPrice);
}
/**
* Calculate compound annual growth rate (CAGR)
*/
export function cagr(startValue: number, endValue: number, years: number): number {
if (years <= 0 || startValue <= 0 || endValue <= 0) return 0;
return Math.pow(endValue / startValue, 1 / years) - 1;
}
/**
* Calculate annualized return from periodic returns
*/
export function annualizeReturn(periodicReturn: number, periodsPerYear: number): number {
return Math.pow(1 + periodicReturn, periodsPerYear) - 1;
}
/**
* Calculate annualized volatility from periodic returns
*/
export function annualizeVolatility(periodicVolatility: number, periodsPerYear: number): number {
return periodicVolatility * Math.sqrt(periodsPerYear);
}
/**
* Calculate present value
*/
export function presentValue(futureValue: number, rate: number, periods: number): number {
return futureValue / Math.pow(1 + rate, periods);
}
/**
* Calculate future value
*/
export function futureValue(presentValue: number, rate: number, periods: number): number {
return presentValue * Math.pow(1 + rate, periods);
}
/**
* Calculate net present value of cash flows
*/
export function netPresentValue(cashFlows: number[], discountRate: number): number {
return cashFlows.reduce((npv, cashFlow, index) => {
return npv + cashFlow / Math.pow(1 + discountRate, index);
}, 0);
}
/**
* Calculate internal rate of return (IRR) using Newton-Raphson method
*/
export function internalRateOfReturn(
cashFlows: number[],
guess: number = 0.1,
maxIterations: number = 100
): number {
let rate = guess;
for (let i = 0; i < maxIterations; i++) {
let npv = 0;
let dnpv = 0;
for (let j = 0; j < cashFlows.length; j++) {
npv += cashFlows[j] / Math.pow(1 + rate, j);
dnpv += (-j * cashFlows[j]) / Math.pow(1 + rate, j + 1);
}
if (Math.abs(npv) < 1e-10) break;
if (Math.abs(dnpv) < 1e-10) break;
rate = rate - npv / dnpv;
}
return rate;
}
/**
* Calculate payback period
*/
export function paybackPeriod(initialInvestment: number, cashFlows: number[]): number {
let cumulativeCashFlow = 0;
for (let i = 0; i < cashFlows.length; i++) {
cumulativeCashFlow += cashFlows[i];
if (cumulativeCashFlow >= initialInvestment) {
return i + 1 - (cumulativeCashFlow - initialInvestment) / cashFlows[i];
}
}
return -1; // Never pays back
}
/**
* Calculate compound interest
*/
export function compoundInterest(
principal: number,
rate: number,
periods: number,
compoundingFrequency: number = 1
): number {
return principal * Math.pow(1 + rate / compoundingFrequency, compoundingFrequency * periods);
}
/**
* Calculate effective annual rate
*/
export function effectiveAnnualRate(nominalRate: number, compoundingFrequency: number): number {
return Math.pow(1 + nominalRate / compoundingFrequency, compoundingFrequency) - 1;
}
/**
* Calculate bond price given yield
*/
export function bondPrice(
faceValue: number,
couponRate: number,
yieldToMaturity: number,
periodsToMaturity: number,
paymentsPerYear: number = 2
): number {
const couponPayment = (faceValue * couponRate) / paymentsPerYear;
const discountRate = yieldToMaturity / paymentsPerYear;
let price = 0;
// Present value of coupon payments
for (let i = 1; i <= periodsToMaturity; i++) {
price += couponPayment / Math.pow(1 + discountRate, i);
}
// Present value of face value
price += faceValue / Math.pow(1 + discountRate, periodsToMaturity);
return price;
}
/**
* Calculate bond yield given price (Newton-Raphson approximation)
*/
export function bondYield(
price: number,
faceValue: number,
couponRate: number,
periodsToMaturity: number,
paymentsPerYear: number = 2,
guess: number = 0.05
): number {
let yield_ = guess;
const maxIterations = 100;
const tolerance = 1e-8;
for (let i = 0; i < maxIterations; i++) {
const calculatedPrice = bondPrice(
faceValue,
couponRate,
yield_,
periodsToMaturity,
paymentsPerYear
);
const diff = calculatedPrice - price;
if (Math.abs(diff) < tolerance) break;
// Numerical derivative
const delta = 0.0001;
const priceUp = bondPrice(
faceValue,
couponRate,
yield_ + delta,
periodsToMaturity,
paymentsPerYear
);
const derivative = (priceUp - calculatedPrice) / delta;
if (Math.abs(derivative) < tolerance) break;
yield_ = yield_ - diff / derivative;
}
return yield_;
}
/**
* Calculate duration (Macaulay duration)
*/
export function macaulayDuration(
faceValue: number,
couponRate: number,
yieldToMaturity: number,
periodsToMaturity: number,
paymentsPerYear: number = 2
): number {
const couponPayment = (faceValue * couponRate) / paymentsPerYear;
const discountRate = yieldToMaturity / paymentsPerYear;
const bondPriceValue = bondPrice(
faceValue,
couponRate,
yieldToMaturity,
periodsToMaturity,
paymentsPerYear
);
let weightedTime = 0;
// Weighted time of coupon payments
for (let i = 1; i <= periodsToMaturity; i++) {
const presentValue = couponPayment / Math.pow(1 + discountRate, i);
weightedTime += (i * presentValue) / bondPriceValue;
}
// Weighted time of face value
const faceValuePV = faceValue / Math.pow(1 + discountRate, periodsToMaturity);
weightedTime += (periodsToMaturity * faceValuePV) / bondPriceValue;
return weightedTime / paymentsPerYear; // Convert to years
}
/**
* Calculate modified duration
*/
export function modifiedDuration(
faceValue: number,
couponRate: number,
yieldToMaturity: number,
periodsToMaturity: number,
paymentsPerYear: number = 2
): number {
const macDuration = macaulayDuration(
faceValue,
couponRate,
yieldToMaturity,
periodsToMaturity,
paymentsPerYear
);
return macDuration / (1 + yieldToMaturity / paymentsPerYear);
}
/**
* Calculate bond convexity
*/
export function bondConvexity(
faceValue: number,
couponRate: number,
yieldToMaturity: number,
periodsToMaturity: number,
paymentsPerYear: number = 2
): number {
const couponPayment = (faceValue * couponRate) / paymentsPerYear;
const discountRate = yieldToMaturity / paymentsPerYear;
let convexity = 0;
const bondPriceValue = bondPrice(
faceValue,
couponRate,
yieldToMaturity,
periodsToMaturity,
paymentsPerYear
);
for (let i = 1; i <= periodsToMaturity; i++) {
const presentValue = couponPayment / Math.pow(1 + discountRate, i);
convexity += (i * (i + 1) * presentValue) / Math.pow(1 + discountRate, 2);
}
const faceValuePV = faceValue / Math.pow(1 + discountRate, periodsToMaturity);
convexity +=
(periodsToMaturity * (periodsToMaturity + 1) * faceValuePV) / Math.pow(1 + discountRate, 2);
return convexity / (bondPriceValue * paymentsPerYear * paymentsPerYear);
}
/**
* Calculate dollar duration
*/
export function dollarDuration(
faceValue: number,
couponRate: number,
yieldToMaturity: number,
periodsToMaturity: number,
paymentsPerYear: number = 2,
basisPointChange: number = 0.01 // 1 basis point = 0.01%
): number {
const modifiedDur = modifiedDuration(
faceValue,
couponRate,
yieldToMaturity,
periodsToMaturity,
paymentsPerYear
);
const bondPriceValue = bondPrice(
faceValue,
couponRate,
yieldToMaturity,
periodsToMaturity,
paymentsPerYear
);
return modifiedDur * bondPriceValue * basisPointChange;
}
/**
* Calculate accrued interest
*/
export function accruedInterest(
faceValue: number,
couponRate: number,
daysSinceLastCoupon: number,
daysInCouponPeriod: number
): number {
return faceValue * couponRate * (daysSinceLastCoupon / daysInCouponPeriod);
}
/**
* Calculate clean price
*/
export function cleanPrice(dirtyPrice: number, accruedInterestValue: number): number {
return dirtyPrice - accruedInterestValue;
}
/**
* Calculate dirty price
*/
export function dirtyPrice(cleanPriceValue: number, accruedInterestValue: number): number {
return cleanPriceValue + accruedInterestValue;
}
/**
* Calculate dividend discount model (DDM)
*/
export function dividendDiscountModel(
currentDividend: number,
growthRate: number,
discountRate: number
): number {
if (discountRate <= growthRate) return NaN; // Indeterminate
return (currentDividend * (1 + growthRate)) / (discountRate - growthRate);
}
/**
* Calculate weighted average cost of capital (WACC)
*/
export function weightedAverageCostOfCapital(
costOfEquity: number,
costOfDebt: number,
equityWeight: number,
debtWeight: number,
taxRate: number
): number {
return equityWeight * costOfEquity + debtWeight * costOfDebt * (1 - taxRate);
}
/**
* Calculate capital asset pricing model (CAPM)
*/
export function capitalAssetPricingModel(
riskFreeRate: number,
beta: number,
marketRiskPremium: number
): number {
return riskFreeRate + beta * marketRiskPremium;
}
/**
* Calculate hurdle rate
*/
export function hurdleRate(costOfCapital: number, riskPremium: number): number {
return costOfCapital + riskPremium;
}
/**
* Calculate degree of operating leverage (DOL)
*/
export function degreeOfOperatingLeverage(
contributionMargin: number,
operatingIncome: number
): number {
return contributionMargin / operatingIncome;
}
/**
* Calculate degree of financial leverage (DFL)
*/
export function degreeOfFinancialLeverage(ebit: number, earningsBeforeTax: number): number {
return ebit / earningsBeforeTax;
}
/**
* Calculate degree of total leverage (DTL)
*/
export function degreeOfTotalLeverage(dol: number, dfl: number): number {
return dol * dfl;
}
/**
* Calculate economic value added (EVA)
*/
export function economicValueAdded(
netOperatingProfitAfterTax: number,
capitalInvested: number,
wacc: number
): number {
return netOperatingProfitAfterTax - capitalInvested * wacc;
}

File diff suppressed because it is too large Load diff

View file

@ -1,166 +1,175 @@
/**
* Comprehensive Financial Calculations Library
*
* This module provides a complete set of financial calculations for trading and investment analysis.
* Organized into logical categories for easy use and maintenance.
*/
// Core interfaces for financial data
export interface OHLCVData {
open: number;
high: number;
low: number;
close: number;
volume: number;
timestamp: Date;
}
export interface PriceData {
price: number;
timestamp: Date;
}
// Financial calculation result interfaces
export interface PortfolioMetrics {
totalValue: number;
totalReturn: number;
totalReturnPercent: number;
dailyReturn: number;
dailyReturnPercent: number;
maxDrawdown: number;
sharpeRatio: number;
beta: number;
alpha: number;
volatility: number;
}
export interface RiskMetrics {
var95: number; // Value at Risk 95%
var99: number; // Value at Risk 99%
cvar95: number; // Conditional VaR 95%
maxDrawdown: number;
volatility: number;
downside_deviation: number;
calmar_ratio: number;
sortino_ratio: number;
beta: number;
alpha: number;
sharpeRatio: number;
treynorRatio: number;
trackingError: number;
informationRatio: number;
}
export interface TechnicalIndicators {
sma: number[];
ema: number[];
rsi: number[];
macd: { macd: number[], signal: number[], histogram: number[] };
bollinger: { upper: number[], middle: number[], lower: number[] };
atr: number[];
stochastic: { k: number[], d: number[] };
williams_r: number[];
cci: number[];
momentum: number[];
roc: number[];
}
// Additional interfaces for new functionality
export interface TradeExecution {
entry: number;
exit: number;
peak?: number;
trough?: number;
volume: number;
timestamp: Date;
}
export interface MarketData {
price: number;
volume: number;
timestamp: Date;
bid?: number;
ask?: number;
bidSize?: number;
askSize?: number;
}
export interface BacktestResults {
trades: TradeExecution[];
equityCurve: Array<{ value: number; date: Date }>;
performance: PortfolioMetrics;
riskMetrics: RiskMetrics;
drawdownAnalysis: any; // Import from performance-metrics
}
// Export all calculation functions
export * from './basic-calculations';
export * from './technical-indicators';
export * from './risk-metrics';
export * from './portfolio-analytics';
export * from './options-pricing';
export * from './position-sizing';
export * from './performance-metrics';
export * from './market-statistics';
export * from './volatility-models';
export * from './correlation-analysis';
// Import specific functions for convenience functions
import {
sma, ema, rsi, macd, bollingerBands, atr, stochastic,
williamsR, cci, momentum, roc
} from './technical-indicators';
import { calculateRiskMetrics } from './risk-metrics';
import { calculateStrategyMetrics } from './performance-metrics';
// Convenience function to calculate all technical indicators at once
export function calculateAllTechnicalIndicators(
ohlcv: OHLCVData[],
periods: { sma?: number; ema?: number; rsi?: number; atr?: number } = {}
): TechnicalIndicators {
const {
sma: smaPeriod = 20,
ema: emaPeriod = 20,
rsi: rsiPeriod = 14,
atr: atrPeriod = 14
} = periods;
const closes = ohlcv.map(d => d.close);
return {
sma: sma(closes, smaPeriod),
ema: ema(closes, emaPeriod),
rsi: rsi(closes, rsiPeriod),
macd: macd(closes),
bollinger: bollingerBands(closes),
atr: atr(ohlcv, atrPeriod),
stochastic: stochastic(ohlcv),
williams_r: williamsR(ohlcv),
cci: cci(ohlcv),
momentum: momentum(closes),
roc: roc(closes)
};
}
// Convenience function for comprehensive portfolio analysis
export function analyzePortfolio(
returns: number[],
equityCurve: Array<{ value: number; date: Date }>,
benchmarkReturns?: number[],
riskFreeRate: number = 0.02
): {
performance: PortfolioMetrics;
risk: RiskMetrics;
trades?: any;
drawdown?: any;
} {
const performance = calculateStrategyMetrics(equityCurve, benchmarkReturns, riskFreeRate);
const equityValues = equityCurve.map(point => point.value);
const risk = calculateRiskMetrics(returns, equityValues, benchmarkReturns, riskFreeRate);
return {
performance,
risk
};
}
// Import specific functions for convenience functions
import { calculateStrategyMetrics } from './performance-metrics';
import { calculateRiskMetrics } from './risk-metrics';
import {
atr,
bollingerBands,
cci,
ema,
macd,
momentum,
roc,
rsi,
sma,
stochastic,
williamsR,
} from './technical-indicators';
/**
* Comprehensive Financial Calculations Library
*
* This module provides a complete set of financial calculations for trading and investment analysis.
* Organized into logical categories for easy use and maintenance.
*/
// Core interfaces for financial data
export interface OHLCVData {
open: number;
high: number;
low: number;
close: number;
volume: number;
timestamp: Date;
}
export interface PriceData {
price: number;
timestamp: Date;
}
// Financial calculation result interfaces
export interface PortfolioMetrics {
totalValue: number;
totalReturn: number;
totalReturnPercent: number;
dailyReturn: number;
dailyReturnPercent: number;
maxDrawdown: number;
sharpeRatio: number;
beta: number;
alpha: number;
volatility: number;
}
export interface RiskMetrics {
var95: number; // Value at Risk 95%
var99: number; // Value at Risk 99%
cvar95: number; // Conditional VaR 95%
maxDrawdown: number;
volatility: number;
downside_deviation: number;
calmar_ratio: number;
sortino_ratio: number;
beta: number;
alpha: number;
sharpeRatio: number;
treynorRatio: number;
trackingError: number;
informationRatio: number;
}
export interface TechnicalIndicators {
sma: number[];
ema: number[];
rsi: number[];
macd: { macd: number[]; signal: number[]; histogram: number[] };
bollinger: { upper: number[]; middle: number[]; lower: number[] };
atr: number[];
stochastic: { k: number[]; d: number[] };
williams_r: number[];
cci: number[];
momentum: number[];
roc: number[];
}
// Additional interfaces for new functionality
export interface TradeExecution {
entry: number;
exit: number;
peak?: number;
trough?: number;
volume: number;
timestamp: Date;
}
export interface MarketData {
price: number;
volume: number;
timestamp: Date;
bid?: number;
ask?: number;
bidSize?: number;
askSize?: number;
}
export interface BacktestResults {
trades: TradeExecution[];
equityCurve: Array<{ value: number; date: Date }>;
performance: PortfolioMetrics;
riskMetrics: RiskMetrics;
drawdownAnalysis: any; // Import from performance-metrics
}
// Export all calculation functions
export * from './basic-calculations';
export * from './technical-indicators';
export * from './risk-metrics';
export * from './portfolio-analytics';
export * from './options-pricing';
export * from './position-sizing';
export * from './performance-metrics';
export * from './market-statistics';
export * from './volatility-models';
export * from './correlation-analysis';
// Convenience function to calculate all technical indicators at once
export function calculateAllTechnicalIndicators(
ohlcv: OHLCVData[],
periods: { sma?: number; ema?: number; rsi?: number; atr?: number } = {}
): TechnicalIndicators {
const {
sma: smaPeriod = 20,
ema: emaPeriod = 20,
rsi: rsiPeriod = 14,
atr: atrPeriod = 14,
} = periods;
const closes = ohlcv.map(d => d.close);
return {
sma: sma(closes, smaPeriod),
ema: ema(closes, emaPeriod),
rsi: rsi(closes, rsiPeriod),
macd: macd(closes),
bollinger: bollingerBands(closes),
atr: atr(ohlcv, atrPeriod),
stochastic: stochastic(ohlcv),
williams_r: williamsR(ohlcv),
cci: cci(ohlcv),
momentum: momentum(closes),
roc: roc(closes),
};
}
// Convenience function for comprehensive portfolio analysis
export function analyzePortfolio(
returns: number[],
equityCurve: Array<{ value: number; date: Date }>,
benchmarkReturns?: number[],
riskFreeRate: number = 0.02
): {
performance: PortfolioMetrics;
risk: RiskMetrics;
trades?: any;
drawdown?: any;
} {
const performance = calculateStrategyMetrics(equityCurve, benchmarkReturns, riskFreeRate);
const equityValues = equityCurve.map(point => point.value);
const risk = calculateRiskMetrics(returns, equityValues, benchmarkReturns, riskFreeRate);
return {
performance,
risk,
};
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -1,375 +1,387 @@
/**
* Risk Metrics and Analysis
* Comprehensive risk measurement tools for portfolio and trading analysis
*/
import { RiskMetrics, treynorRatio } from './index';
/**
* Calculate Value at Risk (VaR) using historical simulation
*/
export function valueAtRisk(returns: number[], confidenceLevel: number = 0.95): number {
if (returns.length === 0) return 0;
const sortedReturns = [...returns].sort((a, b) => a - b);
const index = Math.floor((1 - confidenceLevel) * sortedReturns.length);
return sortedReturns[index] || 0;
}
/**
* Calculate Conditional Value at Risk (CVaR/Expected Shortfall)
*/
export function conditionalValueAtRisk(returns: number[], confidenceLevel: number = 0.95): number {
if (returns.length === 0) return 0;
const sortedReturns = [...returns].sort((a, b) => a - b);
const cutoffIndex = Math.floor((1 - confidenceLevel) * sortedReturns.length);
if (cutoffIndex === 0) return sortedReturns[0];
const tailReturns = sortedReturns.slice(0, cutoffIndex);
return tailReturns.reduce((sum, ret) => sum + ret, 0) / tailReturns.length;
}
/**
* Calculate parametric VaR using normal distribution
*/
export function parametricVaR(
returns: number[],
confidenceLevel: number = 0.95,
portfolioValue: number = 1
): number {
if (returns.length === 0) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
const stdDev = Math.sqrt(variance);
// Z-score for confidence level (normal distribution)
const zScore = getZScore(confidenceLevel);
return portfolioValue * (mean - zScore * stdDev);
}
/**
* Calculate maximum drawdown
*/
export function maxDrawdown(equityCurve: number[]): number {
if (equityCurve.length < 2) return 0;
let maxDD = 0;
let peak = equityCurve[0];
for (let i = 1; i < equityCurve.length; i++) {
if (equityCurve[i] > peak) {
peak = equityCurve[i];
} else {
const drawdown = (peak - equityCurve[i]) / peak;
maxDD = Math.max(maxDD, drawdown);
}
}
return maxDD;
}
/**
* Calculate downside deviation
*/
export function downsideDeviation(returns: number[], targetReturn: number = 0): number {
if (returns.length === 0) return 0;
const downsideReturns = returns.filter(ret => ret < targetReturn);
if (downsideReturns.length === 0) return 0;
const sumSquaredDownside = downsideReturns.reduce(
(sum, ret) => sum + Math.pow(ret - targetReturn, 2),
0
);
return Math.sqrt(sumSquaredDownside / returns.length);
}
/**
* Calculate Sharpe ratio
*/
export function sharpeRatio(returns: number[], riskFreeRate: number = 0): number {
if (returns.length < 2) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
const stdDev = Math.sqrt(variance);
if (stdDev === 0) return 0;
return (mean - riskFreeRate) / stdDev;
}
/**
* Calculate beta coefficient
*/
export function beta(portfolioReturns: number[], marketReturns: number[]): number {
if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < 2) {
return 0;
}
const n = portfolioReturns.length;
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / n;
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / n;
let covariance = 0;
let marketVariance = 0;
for (let i = 0; i < n; i++) {
const portfolioDiff = portfolioReturns[i] - portfolioMean;
const marketDiff = marketReturns[i] - marketMean;
covariance += portfolioDiff * marketDiff;
marketVariance += marketDiff * marketDiff;
}
return marketVariance === 0 ? 0 : covariance / marketVariance;
}
/**
* Calculate alpha
*/
export function alpha(
portfolioReturns: number[],
marketReturns: number[],
riskFreeRate: number = 0
): number {
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length;
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length;
const portfolioBeta = beta(portfolioReturns, marketReturns);
return portfolioMean - (riskFreeRate + portfolioBeta * (marketMean - riskFreeRate));
}
/**
* Calculate tracking error
*/
export function trackingError(portfolioReturns: number[], benchmarkReturns: number[]): number {
if (portfolioReturns.length !== benchmarkReturns.length || portfolioReturns.length === 0) {
return 0;
}
const activeReturns = portfolioReturns.map((ret, i) => ret - benchmarkReturns[i]);
const mean = activeReturns.reduce((sum, ret) => sum + ret, 0) / activeReturns.length;
const variance = activeReturns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (activeReturns.length - 1);
return Math.sqrt(variance);
}
/**
* Calculate volatility (standard deviation of returns)
*/
export function volatility(returns: number[]): number {
if (returns.length < 2) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
return Math.sqrt(variance);
}
/**
* Calculate annualized volatility
*/
export function annualizedVolatility(returns: number[], periodsPerYear: number = 252): number {
return volatility(returns) * Math.sqrt(periodsPerYear);
}
/**
* Calculate skewness (measure of asymmetry)
*/
export function skewness(returns: number[]): number {
if (returns.length < 3) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length;
const stdDev = Math.sqrt(variance);
if (stdDev === 0) return 0;
const skew = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 3), 0) / returns.length;
return skew;
}
/**
* Calculate kurtosis (measure of tail heaviness)
*/
export function kurtosis(returns: number[]): number {
if (returns.length < 4) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length;
const stdDev = Math.sqrt(variance);
if (stdDev === 0) return 0;
const kurt = returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 4), 0) / returns.length;
return kurt - 3; // Excess kurtosis (subtract 3 for normal distribution baseline)
}
/**
* Calculate comprehensive risk metrics
*/
export function calculateRiskMetrics(
returns: number[],
equityCurve: number[],
marketReturns?: number[],
riskFreeRate: number = 0
): RiskMetrics {
if (returns.length === 0) {
return {
var95: 0,
var99: 0,
cvar95: 0,
maxDrawdown: 0,
volatility: 0,
downside_deviation: 0,
calmar_ratio: 0,
sortino_ratio: 0,
beta: 0,
alpha: 0,
sharpeRatio: 0,
treynorRatio: 0,
trackingError: 0,
informationRatio: 0
};
}
const portfolioVolatility = volatility(returns);
const portfolioMean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
// Calculate VaR
const var95Value = valueAtRisk(returns, 0.95);
const var99Value = valueAtRisk(returns, 0.99);
const cvar95Value = conditionalValueAtRisk(returns, 0.95);
// Calculate max drawdown
const maxDD = maxDrawdown(equityCurve);
// Calculate downside deviation
const downsideDeviationValue = downsideDeviation(returns);
// Calculate ratios
const calmarRatio = maxDD > 0 ? portfolioMean / maxDD : 0;
const sortinoRatio = downsideDeviationValue > 0 ? (portfolioMean - riskFreeRate) / downsideDeviationValue : 0;
const sharpeRatio = portfolioVolatility > 0 ? (portfolioMean - riskFreeRate) / portfolioVolatility : 0;
let portfolioBeta = 0;
let portfolioAlpha = 0;
let portfolioTreynorRatio = 0;
let portfolioTrackingError = 0;
let informationRatio = 0;
if (marketReturns && marketReturns.length === returns.length) {
portfolioBeta = beta(returns, marketReturns);
portfolioAlpha = alpha(returns, marketReturns, riskFreeRate);
portfolioTreynorRatio = treynorRatio(returns, marketReturns, riskFreeRate);
portfolioTrackingError = trackingError(returns, marketReturns);
informationRatio = portfolioTrackingError > 0 ? portfolioAlpha / portfolioTrackingError : 0;
}
return {
var95: var95Value,
var99: var99Value,
cvar95: cvar95Value,
maxDrawdown: maxDD,
volatility: portfolioVolatility,
downside_deviation: downsideDeviationValue,
calmar_ratio: calmarRatio,
sortino_ratio: sortinoRatio,
beta: portfolioBeta,
alpha: portfolioAlpha,
sharpeRatio,
treynorRatio: portfolioTreynorRatio,
trackingError: portfolioTrackingError,
informationRatio
};
}
/**
* Helper function to get Z-score for confidence level
* This implementation handles arbitrary confidence levels
*/
function getZScore(confidenceLevel: number): number {
// First check our lookup table for common values (more precise)
const zScores: { [key: string]: number } = {
'0.90': 1.282,
'0.95': 1.645,
'0.975': 1.960,
'0.99': 2.326,
'0.995': 2.576
};
const key = confidenceLevel.toString();
if (zScores[key]) return zScores[key];
// For arbitrary confidence levels, use approximation
if (confidenceLevel < 0.5) return -getZScore(1 - confidenceLevel);
if (confidenceLevel >= 0.999) return 3.09; // Cap at 99.9% for numerical stability
// Approximation of inverse normal CDF
const y = Math.sqrt(-2.0 * Math.log(1.0 - confidenceLevel));
return y - (2.515517 + 0.802853 * y + 0.010328 * y * y) /
(1.0 + 1.432788 * y + 0.189269 * y * y + 0.001308 * y * y * y);
}
/**
* Calculate portfolio risk contribution
*/
export function riskContribution(
weights: number[],
covarianceMatrix: number[][],
portfolioVolatility: number
): number[] {
const n = weights.length;
const contributions: number[] = [];
for (let i = 0; i < n; i++) {
let marginalContribution = 0;
for (let j = 0; j < n; j++) {
marginalContribution += weights[j] * covarianceMatrix[i][j];
}
const contribution = (weights[i] * marginalContribution) / Math.pow(portfolioVolatility, 2);
contributions.push(contribution);
}
return contributions;
}
/**
* Calculate Ulcer Index
*/
export function ulcerIndex(equityCurve: Array<{ value: number; date: Date }>): number {
let sumSquaredDrawdown = 0;
let peak = equityCurve[0].value;
for (const point of equityCurve) {
peak = Math.max(peak, point.value);
const drawdownPercent = (peak - point.value) / peak * 100;
sumSquaredDrawdown += drawdownPercent * drawdownPercent;
}
return Math.sqrt(sumSquaredDrawdown / equityCurve.length);
}
/**
* Calculate risk-adjusted return (RAR)
*/
export function riskAdjustedReturn(
portfolioReturn: number,
portfolioRisk: number,
riskFreeRate: number = 0
): number {
if (portfolioRisk === 0) return 0;
return (portfolioReturn - riskFreeRate) / portfolioRisk;
}
/**
* Risk Metrics and Analysis
* Comprehensive risk measurement tools for portfolio and trading analysis
*/
import { RiskMetrics, treynorRatio } from './index';
/**
* Calculate Value at Risk (VaR) using historical simulation
*/
export function valueAtRisk(returns: number[], confidenceLevel: number = 0.95): number {
if (returns.length === 0) return 0;
const sortedReturns = [...returns].sort((a, b) => a - b);
const index = Math.floor((1 - confidenceLevel) * sortedReturns.length);
return sortedReturns[index] || 0;
}
/**
* Calculate Conditional Value at Risk (CVaR/Expected Shortfall)
*/
export function conditionalValueAtRisk(returns: number[], confidenceLevel: number = 0.95): number {
if (returns.length === 0) return 0;
const sortedReturns = [...returns].sort((a, b) => a - b);
const cutoffIndex = Math.floor((1 - confidenceLevel) * sortedReturns.length);
if (cutoffIndex === 0) return sortedReturns[0];
const tailReturns = sortedReturns.slice(0, cutoffIndex);
return tailReturns.reduce((sum, ret) => sum + ret, 0) / tailReturns.length;
}
/**
* Calculate parametric VaR using normal distribution
*/
export function parametricVaR(
returns: number[],
confidenceLevel: number = 0.95,
portfolioValue: number = 1
): number {
if (returns.length === 0) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const variance =
returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
const stdDev = Math.sqrt(variance);
// Z-score for confidence level (normal distribution)
const zScore = getZScore(confidenceLevel);
return portfolioValue * (mean - zScore * stdDev);
}
/**
* Calculate maximum drawdown
*/
export function maxDrawdown(equityCurve: number[]): number {
if (equityCurve.length < 2) return 0;
let maxDD = 0;
let peak = equityCurve[0];
for (let i = 1; i < equityCurve.length; i++) {
if (equityCurve[i] > peak) {
peak = equityCurve[i];
} else {
const drawdown = (peak - equityCurve[i]) / peak;
maxDD = Math.max(maxDD, drawdown);
}
}
return maxDD;
}
/**
* Calculate downside deviation
*/
export function downsideDeviation(returns: number[], targetReturn: number = 0): number {
if (returns.length === 0) return 0;
const downsideReturns = returns.filter(ret => ret < targetReturn);
if (downsideReturns.length === 0) return 0;
const sumSquaredDownside = downsideReturns.reduce(
(sum, ret) => sum + Math.pow(ret - targetReturn, 2),
0
);
return Math.sqrt(sumSquaredDownside / returns.length);
}
/**
* Calculate Sharpe ratio
*/
export function sharpeRatio(returns: number[], riskFreeRate: number = 0): number {
if (returns.length < 2) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const variance =
returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
const stdDev = Math.sqrt(variance);
if (stdDev === 0) return 0;
return (mean - riskFreeRate) / stdDev;
}
/**
* Calculate beta coefficient
*/
export function beta(portfolioReturns: number[], marketReturns: number[]): number {
if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < 2) {
return 0;
}
const n = portfolioReturns.length;
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / n;
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / n;
let covariance = 0;
let marketVariance = 0;
for (let i = 0; i < n; i++) {
const portfolioDiff = portfolioReturns[i] - portfolioMean;
const marketDiff = marketReturns[i] - marketMean;
covariance += portfolioDiff * marketDiff;
marketVariance += marketDiff * marketDiff;
}
return marketVariance === 0 ? 0 : covariance / marketVariance;
}
/**
* Calculate alpha
*/
export function alpha(
portfolioReturns: number[],
marketReturns: number[],
riskFreeRate: number = 0
): number {
const portfolioMean =
portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length;
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length;
const portfolioBeta = beta(portfolioReturns, marketReturns);
return portfolioMean - (riskFreeRate + portfolioBeta * (marketMean - riskFreeRate));
}
/**
* Calculate tracking error
*/
export function trackingError(portfolioReturns: number[], benchmarkReturns: number[]): number {
if (portfolioReturns.length !== benchmarkReturns.length || portfolioReturns.length === 0) {
return 0;
}
const activeReturns = portfolioReturns.map((ret, i) => ret - benchmarkReturns[i]);
const mean = activeReturns.reduce((sum, ret) => sum + ret, 0) / activeReturns.length;
const variance =
activeReturns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) /
(activeReturns.length - 1);
return Math.sqrt(variance);
}
/**
* Calculate volatility (standard deviation of returns)
*/
export function volatility(returns: number[]): number {
if (returns.length < 2) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const variance =
returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / (returns.length - 1);
return Math.sqrt(variance);
}
/**
* Calculate annualized volatility
*/
export function annualizedVolatility(returns: number[], periodsPerYear: number = 252): number {
return volatility(returns) * Math.sqrt(periodsPerYear);
}
/**
* Calculate skewness (measure of asymmetry)
*/
export function skewness(returns: number[]): number {
if (returns.length < 3) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length;
const stdDev = Math.sqrt(variance);
if (stdDev === 0) return 0;
const skew =
returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 3), 0) / returns.length;
return skew;
}
/**
* Calculate kurtosis (measure of tail heaviness)
*/
export function kurtosis(returns: number[]): number {
if (returns.length < 4) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const variance = returns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / returns.length;
const stdDev = Math.sqrt(variance);
if (stdDev === 0) return 0;
const kurt =
returns.reduce((sum, ret) => sum + Math.pow((ret - mean) / stdDev, 4), 0) / returns.length;
return kurt - 3; // Excess kurtosis (subtract 3 for normal distribution baseline)
}
/**
* Calculate comprehensive risk metrics
*/
export function calculateRiskMetrics(
returns: number[],
equityCurve: number[],
marketReturns?: number[],
riskFreeRate: number = 0
): RiskMetrics {
if (returns.length === 0) {
return {
var95: 0,
var99: 0,
cvar95: 0,
maxDrawdown: 0,
volatility: 0,
downside_deviation: 0,
calmar_ratio: 0,
sortino_ratio: 0,
beta: 0,
alpha: 0,
sharpeRatio: 0,
treynorRatio: 0,
trackingError: 0,
informationRatio: 0,
};
}
const portfolioVolatility = volatility(returns);
const portfolioMean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
// Calculate VaR
const var95Value = valueAtRisk(returns, 0.95);
const var99Value = valueAtRisk(returns, 0.99);
const cvar95Value = conditionalValueAtRisk(returns, 0.95);
// Calculate max drawdown
const maxDD = maxDrawdown(equityCurve);
// Calculate downside deviation
const downsideDeviationValue = downsideDeviation(returns);
// Calculate ratios
const calmarRatio = maxDD > 0 ? portfolioMean / maxDD : 0;
const sortinoRatio =
downsideDeviationValue > 0 ? (portfolioMean - riskFreeRate) / downsideDeviationValue : 0;
const sharpeRatio =
portfolioVolatility > 0 ? (portfolioMean - riskFreeRate) / portfolioVolatility : 0;
let portfolioBeta = 0;
let portfolioAlpha = 0;
let portfolioTreynorRatio = 0;
let portfolioTrackingError = 0;
let informationRatio = 0;
if (marketReturns && marketReturns.length === returns.length) {
portfolioBeta = beta(returns, marketReturns);
portfolioAlpha = alpha(returns, marketReturns, riskFreeRate);
portfolioTreynorRatio = treynorRatio(returns, marketReturns, riskFreeRate);
portfolioTrackingError = trackingError(returns, marketReturns);
informationRatio = portfolioTrackingError > 0 ? portfolioAlpha / portfolioTrackingError : 0;
}
return {
var95: var95Value,
var99: var99Value,
cvar95: cvar95Value,
maxDrawdown: maxDD,
volatility: portfolioVolatility,
downside_deviation: downsideDeviationValue,
calmar_ratio: calmarRatio,
sortino_ratio: sortinoRatio,
beta: portfolioBeta,
alpha: portfolioAlpha,
sharpeRatio,
treynorRatio: portfolioTreynorRatio,
trackingError: portfolioTrackingError,
informationRatio,
};
}
/**
* Helper function to get Z-score for confidence level
* This implementation handles arbitrary confidence levels
*/
function getZScore(confidenceLevel: number): number {
// First check our lookup table for common values (more precise)
const zScores: { [key: string]: number } = {
'0.90': 1.282,
'0.95': 1.645,
'0.975': 1.96,
'0.99': 2.326,
'0.995': 2.576,
};
const key = confidenceLevel.toString();
if (zScores[key]) return zScores[key];
// For arbitrary confidence levels, use approximation
if (confidenceLevel < 0.5) return -getZScore(1 - confidenceLevel);
if (confidenceLevel >= 0.999) return 3.09; // Cap at 99.9% for numerical stability
// Approximation of inverse normal CDF
const y = Math.sqrt(-2.0 * Math.log(1.0 - confidenceLevel));
return (
y -
(2.515517 + 0.802853 * y + 0.010328 * y * y) /
(1.0 + 1.432788 * y + 0.189269 * y * y + 0.001308 * y * y * y)
);
}
/**
* Calculate portfolio risk contribution
*/
export function riskContribution(
weights: number[],
covarianceMatrix: number[][],
portfolioVolatility: number
): number[] {
const n = weights.length;
const contributions: number[] = [];
for (let i = 0; i < n; i++) {
let marginalContribution = 0;
for (let j = 0; j < n; j++) {
marginalContribution += weights[j] * covarianceMatrix[i][j];
}
const contribution = (weights[i] * marginalContribution) / Math.pow(portfolioVolatility, 2);
contributions.push(contribution);
}
return contributions;
}
/**
* Calculate Ulcer Index
*/
export function ulcerIndex(equityCurve: Array<{ value: number; date: Date }>): number {
let sumSquaredDrawdown = 0;
let peak = equityCurve[0].value;
for (const point of equityCurve) {
peak = Math.max(peak, point.value);
const drawdownPercent = ((peak - point.value) / peak) * 100;
sumSquaredDrawdown += drawdownPercent * drawdownPercent;
}
return Math.sqrt(sumSquaredDrawdown / equityCurve.length);
}
/**
* Calculate risk-adjusted return (RAR)
*/
export function riskAdjustedReturn(
portfolioReturn: number,
portfolioRisk: number,
riskFreeRate: number = 0
): number {
if (portfolioRisk === 0) return 0;
return (portfolioReturn - riskFreeRate) / portfolioRisk;
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -1,55 +1,55 @@
/**
* Date and time utilities for working with market data
*/
export const dateUtils = {
/**
* Check if a date is a trading day (Monday-Friday, non-holiday)
* This is a simplified implementation - a real version would check market holidays
*/
isTradingDay(date: Date): boolean {
const day = date.getDay();
return day > 0 && day < 6; // Mon-Fri
},
/**
* Get the next trading day from a given date
*/
getNextTradingDay(date: Date): Date {
const nextDay = new Date(date);
nextDay.setDate(nextDay.getDate() + 1);
while (!this.isTradingDay(nextDay)) {
nextDay.setDate(nextDay.getDate() + 1);
}
return nextDay;
},
/**
* Get the previous trading day from a given date
*/
getPreviousTradingDay(date: Date): Date {
const prevDay = new Date(date);
prevDay.setDate(prevDay.getDate() - 1);
while (!this.isTradingDay(prevDay)) {
prevDay.setDate(prevDay.getDate() - 1);
}
return prevDay;
},
/**
* Format a date as YYYY-MM-DD
*/
formatDate(date: Date): string {
return date.toISOString().split('T')[0];
},
/**
* Parse a date string in YYYY-MM-DD format
*/
parseDate(dateStr: string): Date {
return new Date(dateStr);
}
};
/**
* Date and time utilities for working with market data
*/
export const dateUtils = {
/**
* Check if a date is a trading day (Monday-Friday, non-holiday)
* This is a simplified implementation - a real version would check market holidays
*/
isTradingDay(date: Date): boolean {
const day = date.getDay();
return day > 0 && day < 6; // Mon-Fri
},
/**
* Get the next trading day from a given date
*/
getNextTradingDay(date: Date): Date {
const nextDay = new Date(date);
nextDay.setDate(nextDay.getDate() + 1);
while (!this.isTradingDay(nextDay)) {
nextDay.setDate(nextDay.getDate() + 1);
}
return nextDay;
},
/**
* Get the previous trading day from a given date
*/
getPreviousTradingDay(date: Date): Date {
const prevDay = new Date(date);
prevDay.setDate(prevDay.getDate() - 1);
while (!this.isTradingDay(prevDay)) {
prevDay.setDate(prevDay.getDate() - 1);
}
return prevDay;
},
/**
* Format a date as YYYY-MM-DD
*/
formatDate(date: Date): string {
return date.toISOString().split('T')[0];
},
/**
* Parse a date string in YYYY-MM-DD format
*/
parseDate(dateStr: string): Date {
return new Date(dateStr);
},
};

View file

@ -1,2 +1,2 @@
export * from './dateUtils';
export * from './calculations/index';
export * from './dateUtils';
export * from './calculations/index';

View file

@ -1,403 +1,401 @@
/**
* Test suite for position sizing calculations
*/
import { describe, it, expect } from 'bun:test';
import {
fixedRiskPositionSize,
kellyPositionSize,
fractionalKellyPositionSize,
volatilityTargetPositionSize,
equalWeightPositionSize,
atrBasedPositionSize,
expectancyPositionSize,
monteCarloPositionSize,
sharpeOptimizedPositionSize,
fixedFractionalPositionSize,
volatilityAdjustedPositionSize,
correlationAdjustedPositionSize,
calculatePortfolioHeat,
dynamicPositionSize,
liquidityConstrainedPositionSize,
multiTimeframePositionSize,
riskParityPositionSize,
validatePositionSize,
type PositionSizeParams,
type KellyParams,
type VolatilityParams
} from '../../src/calculations/position-sizing';
describe('Position Sizing Calculations', () => {
describe('fixedRiskPositionSize', () => {
it('should calculate correct position size for long position', () => {
const params: PositionSizeParams = {
accountSize: 100000,
riskPercentage: 2,
entryPrice: 100,
stopLoss: 95,
leverage: 1
};
const result = fixedRiskPositionSize(params);
// Risk amount: 100000 * 0.02 = 2000
// Risk per share: 100 - 95 = 5
// Position size: 2000 / 5 = 400 shares
expect(result).toBe(400);
});
it('should calculate correct position size for short position', () => {
const params: PositionSizeParams = {
accountSize: 100000,
riskPercentage: 2,
entryPrice: 100,
stopLoss: 105,
leverage: 1
};
const result = fixedRiskPositionSize(params);
// Risk per share: |100 - 105| = 5
// Position size: 2000 / 5 = 400 shares
expect(result).toBe(400);
});
it('should return 0 for invalid inputs', () => {
const params: PositionSizeParams = {
accountSize: 0,
riskPercentage: 2,
entryPrice: 100,
stopLoss: 95
};
expect(fixedRiskPositionSize(params)).toBe(0);
});
it('should return 0 when entry price equals stop loss', () => {
const params: PositionSizeParams = {
accountSize: 100000,
riskPercentage: 2,
entryPrice: 100,
stopLoss: 100
};
expect(fixedRiskPositionSize(params)).toBe(0);
});
});
describe('kellyPositionSize', () => {
it('should calculate correct Kelly position size', () => {
const params: KellyParams = {
winRate: 0.6,
averageWin: 150,
averageLoss: -100
};
const result = kellyPositionSize(params, 100000);
// Kelly formula: f = (bp - q) / b
// b = 150/100 = 1.5, p = 0.6, q = 0.4
// f = (1.5 * 0.6 - 0.4) / 1.5 = (0.9 - 0.4) / 1.5 = 0.5 / 1.5 = 0.333
// With safety factor of 0.25: 0.333 * 0.25 = 0.083
// Capped at 0.25, so result should be 0.083
// Position: 100000 * 0.083 = 8300
expect(result).toBeCloseTo(8333, 0);
});
it('should return 0 for negative expectancy', () => {
const params: KellyParams = {
winRate: 0.3,
averageWin: 100,
averageLoss: -200
};
const result = kellyPositionSize(params, 100000);
expect(result).toBe(0);
});
it('should return 0 for invalid inputs', () => {
const params: KellyParams = {
winRate: 0,
averageWin: 100,
averageLoss: -100
};
expect(kellyPositionSize(params, 100000)).toBe(0);
});
});
describe('volatilityTargetPositionSize', () => {
it('should calculate correct volatility-targeted position size', () => {
const params: VolatilityParams = {
price: 100,
volatility: 0.20,
targetVolatility: 0.10,
lookbackDays: 30
};
const result = volatilityTargetPositionSize(params, 100000);
// Volatility ratio: 0.10 / 0.20 = 0.5
// Position value: 100000 * 0.5 = 50000
// Position size: 50000 / 100 = 500 shares
expect(result).toBe(500);
});
it('should cap leverage at 2x', () => {
const params: VolatilityParams = {
price: 100,
volatility: 0.05,
targetVolatility: 0.20,
lookbackDays: 30
};
const result = volatilityTargetPositionSize(params, 100000);
// Volatility ratio would be 4, but capped at 2
// Position value: 100000 * 2 = 200000
// Position size: 200000 / 100 = 2000 shares
expect(result).toBe(2000);
});
});
describe('equalWeightPositionSize', () => {
it('should calculate equal weight position size', () => {
const result = equalWeightPositionSize(100000, 5, 100);
// Position value per asset: 100000 / 5 = 20000
// Position size: 20000 / 100 = 200 shares
expect(result).toBe(200);
});
it('should return 0 for invalid inputs', () => {
expect(equalWeightPositionSize(100000, 0, 100)).toBe(0);
expect(equalWeightPositionSize(100000, 5, 0)).toBe(0);
});
});
describe('atrBasedPositionSize', () => {
it('should calculate ATR-based position size', () => {
const result = atrBasedPositionSize(100000, 2, 5, 2, 100);
// Risk amount: 100000 * 0.02 = 2000
// Stop distance: 5 * 2 = 10
// Position size: 2000 / 10 = 200 shares
expect(result).toBe(200);
});
it('should return 0 for zero ATR', () => {
const result = atrBasedPositionSize(100000, 2, 0, 2, 100);
expect(result).toBe(0);
});
});
describe('expectancyPositionSize', () => {
it('should calculate expectancy-based position size', () => {
const result = expectancyPositionSize(100000, 0.6, 150, -100, 5);
// Expectancy: 0.6 * 150 - 0.4 * 100 = 90 - 40 = 50
// Expectancy ratio: 50 / 100 = 0.5
// Risk percentage: min(0.5 * 0.5, 5) = min(0.25, 5) = 0.25
// Position: 100000 * 0.0025 = 250
expect(result).toBe(250);
});
it('should return 0 for negative expectancy', () => {
const result = expectancyPositionSize(100000, 0.3, 100, -200);
expect(result).toBe(0);
});
});
describe('correlationAdjustedPositionSize', () => {
it('should adjust position size based on correlation', () => {
const existingPositions = [
{ size: 1000, correlation: 0.5 },
{ size: 500, correlation: 0.3 }
];
const result = correlationAdjustedPositionSize(1000, existingPositions, 0.5);
// Should reduce position size based on correlation risk
expect(result).toBeLessThan(1000);
expect(result).toBeGreaterThan(0);
});
it('should return original size when no existing positions', () => {
const result = correlationAdjustedPositionSize(1000, [], 0.5);
expect(result).toBe(1000);
});
});
describe('calculatePortfolioHeat', () => {
it('should calculate portfolio heat correctly', () => {
const positions = [
{ value: 10000, risk: 500 },
{ value: 15000, risk: 750 },
{ value: 20000, risk: 1000 }
];
const result = calculatePortfolioHeat(positions, 100000);
// Total risk: 500 + 750 + 1000 = 2250
// Heat: (2250 / 100000) * 100 = 2.25%
expect(result).toBe(2.25);
});
it('should handle empty positions array', () => {
const result = calculatePortfolioHeat([], 100000);
expect(result).toBe(0);
});
it('should cap heat at 100%', () => {
const positions = [
{ value: 50000, risk: 150000 }
];
const result = calculatePortfolioHeat(positions, 100000);
expect(result).toBe(100);
});
});
describe('dynamicPositionSize', () => {
it('should adjust position size based on market conditions', () => {
const result = dynamicPositionSize(1000, 0.25, 0.15, 0.05, 0.10);
// Volatility adjustment: 0.15 / 0.25 = 0.6
// Drawdown adjustment: 1 - (0.05 / 0.10) = 0.5
// Adjusted size: 1000 * 0.6 * 0.5 = 300
expect(result).toBe(300);
});
it('should handle high drawdown', () => {
const result = dynamicPositionSize(1000, 0.20, 0.15, 0.15, 0.10);
// Should significantly reduce position size due to high drawdown
expect(result).toBeLessThan(500);
});
});
describe('liquidityConstrainedPositionSize', () => {
it('should constrain position size based on liquidity', () => {
const result = liquidityConstrainedPositionSize(1000, 10000, 0.05, 100);
// Max shares: 10000 * 0.05 = 500
// Should return min(1000, 500) = 500
expect(result).toBe(500);
});
it('should return desired size when liquidity allows', () => {
const result = liquidityConstrainedPositionSize(500, 20000, 0.05, 100);
// Max shares: 20000 * 0.05 = 1000
// Should return min(500, 1000) = 500
expect(result).toBe(500);
});
});
describe('multiTimeframePositionSize', () => {
it('should weight signals correctly', () => {
const result = multiTimeframePositionSize(100000, 0.8, 0.6, 0.4, 2);
// Weighted signal: 0.8 * 0.2 + 0.6 * 0.3 + 0.4 * 0.5 = 0.16 + 0.18 + 0.2 = 0.54
// Adjusted risk: 2 * 0.54 = 1.08%
// Position: 100000 * 0.0108 = 1080
expect(result).toBe(1080);
});
it('should clamp signals to valid range', () => {
const result = multiTimeframePositionSize(100000, 2, -2, 1.5, 2);
// Signals should be clamped to [-1, 1]
// Weighted: 1 * 0.2 + (-1) * 0.3 + 1 * 0.5 = 0.2 - 0.3 + 0.5 = 0.4
// Adjusted risk: 2 * 0.4 = 0.8%
expect(result).toBe(800);
});
});
describe('riskParityPositionSize', () => {
it('should allocate based on inverse volatility', () => {
const assets = [
{ volatility: 0.10, price: 100 },
{ volatility: 0.20, price: 200 }
];
const result = riskParityPositionSize(assets, 0.15, 100000);
// Asset 1: 1/0.10 = 10, Asset 2: 1/0.20 = 5
// Total inverse vol: 15
// Weights: Asset 1: 10/15 = 0.667, Asset 2: 5/15 = 0.333
expect(result).toHaveLength(2);
expect(result[0]).toBeGreaterThan(result[1]);
});
it('should handle zero volatility assets', () => {
const assets = [
{ volatility: 0, price: 100 },
{ volatility: 0.20, price: 200 }
];
const result = riskParityPositionSize(assets, 0.15, 100000);
expect(result[0]).toBe(0);
expect(result[1]).toBeGreaterThan(0);
});
});
describe('sharpeOptimizedPositionSize', () => {
it('should calculate position size based on Sharpe optimization', () => {
const result = sharpeOptimizedPositionSize(100000, 0.15, 0.20, 0.02, 3);
// Kelly formula for continuous returns: f = (μ - r) / σ²
// Expected return: 0.15, Risk-free: 0.02, Volatility: 0.20
// f = (0.15 - 0.02) / (0.20)² = 0.13 / 0.04 = 3.25
// But capped at maxLeverage=3, so should be 3.0
// Final position: 100000 * 3 = 300000
expect(result).toBe(300000);
});
it('should return 0 for invalid inputs', () => {
// Invalid volatility
expect(sharpeOptimizedPositionSize(100000, 0.15, 0, 0.02)).toBe(0);
// Invalid account size
expect(sharpeOptimizedPositionSize(0, 0.15, 0.20, 0.02)).toBe(0);
// Expected return less than risk-free rate
expect(sharpeOptimizedPositionSize(100000, 0.01, 0.20, 0.02)).toBe(0);
});
it('should respect maximum leverage', () => {
const result = sharpeOptimizedPositionSize(100000, 0.30, 0.20, 0.02, 2);
// Kelly fraction would be (0.30 - 0.02) / (0.20)² = 7, but capped at 2
// Position: 100000 * 2 = 200000
expect(result).toBe(200000);
});
});
describe('validatePositionSize', () => {
it('should validate position size against limits', () => {
const result = validatePositionSize(500, 100, 100000, 10, 2);
// Position value: 500 * 100 = 50000 (50% of account)
// This exceeds 10% limit
expect(result.isValid).toBe(false);
expect(result.violations).toContain('Position exceeds maximum 10% of account');
expect(result.adjustedSize).toBe(100); // 10000 / 100
});
it('should pass validation for reasonable position', () => {
const result = validatePositionSize(50, 100, 100000, 10, 2);
// Position value: 50 * 100 = 5000 (5% of account)
expect(result.isValid).toBe(true);
expect(result.violations).toHaveLength(0);
expect(result.adjustedSize).toBe(50);
});
it('should handle fractional shares', () => {
const result = validatePositionSize(0.5, 100, 100000, 10, 2);
expect(result.isValid).toBe(false);
expect(result.violations).toContain('Position size too small (less than 1 share)');
expect(result.adjustedSize).toBe(0);
});
});
});
/**
* Test suite for position sizing calculations
*/
import { describe, expect, it } from 'bun:test';
import {
atrBasedPositionSize,
calculatePortfolioHeat,
correlationAdjustedPositionSize,
dynamicPositionSize,
equalWeightPositionSize,
expectancyPositionSize,
fixedFractionalPositionSize,
fixedRiskPositionSize,
fractionalKellyPositionSize,
kellyPositionSize,
liquidityConstrainedPositionSize,
monteCarloPositionSize,
multiTimeframePositionSize,
riskParityPositionSize,
sharpeOptimizedPositionSize,
validatePositionSize,
volatilityAdjustedPositionSize,
volatilityTargetPositionSize,
type KellyParams,
type PositionSizeParams,
type VolatilityParams,
} from '../../src/calculations/position-sizing';
describe('Position Sizing Calculations', () => {
describe('fixedRiskPositionSize', () => {
it('should calculate correct position size for long position', () => {
const params: PositionSizeParams = {
accountSize: 100000,
riskPercentage: 2,
entryPrice: 100,
stopLoss: 95,
leverage: 1,
};
const result = fixedRiskPositionSize(params);
// Risk amount: 100000 * 0.02 = 2000
// Risk per share: 100 - 95 = 5
// Position size: 2000 / 5 = 400 shares
expect(result).toBe(400);
});
it('should calculate correct position size for short position', () => {
const params: PositionSizeParams = {
accountSize: 100000,
riskPercentage: 2,
entryPrice: 100,
stopLoss: 105,
leverage: 1,
};
const result = fixedRiskPositionSize(params);
// Risk per share: |100 - 105| = 5
// Position size: 2000 / 5 = 400 shares
expect(result).toBe(400);
});
it('should return 0 for invalid inputs', () => {
const params: PositionSizeParams = {
accountSize: 0,
riskPercentage: 2,
entryPrice: 100,
stopLoss: 95,
};
expect(fixedRiskPositionSize(params)).toBe(0);
});
it('should return 0 when entry price equals stop loss', () => {
const params: PositionSizeParams = {
accountSize: 100000,
riskPercentage: 2,
entryPrice: 100,
stopLoss: 100,
};
expect(fixedRiskPositionSize(params)).toBe(0);
});
});
describe('kellyPositionSize', () => {
it('should calculate correct Kelly position size', () => {
const params: KellyParams = {
winRate: 0.6,
averageWin: 150,
averageLoss: -100,
};
const result = kellyPositionSize(params, 100000);
// Kelly formula: f = (bp - q) / b
// b = 150/100 = 1.5, p = 0.6, q = 0.4
// f = (1.5 * 0.6 - 0.4) / 1.5 = (0.9 - 0.4) / 1.5 = 0.5 / 1.5 = 0.333
// With safety factor of 0.25: 0.333 * 0.25 = 0.083
// Capped at 0.25, so result should be 0.083
// Position: 100000 * 0.083 = 8300
expect(result).toBeCloseTo(8333, 0);
});
it('should return 0 for negative expectancy', () => {
const params: KellyParams = {
winRate: 0.3,
averageWin: 100,
averageLoss: -200,
};
const result = kellyPositionSize(params, 100000);
expect(result).toBe(0);
});
it('should return 0 for invalid inputs', () => {
const params: KellyParams = {
winRate: 0,
averageWin: 100,
averageLoss: -100,
};
expect(kellyPositionSize(params, 100000)).toBe(0);
});
});
describe('volatilityTargetPositionSize', () => {
it('should calculate correct volatility-targeted position size', () => {
const params: VolatilityParams = {
price: 100,
volatility: 0.2,
targetVolatility: 0.1,
lookbackDays: 30,
};
const result = volatilityTargetPositionSize(params, 100000);
// Volatility ratio: 0.10 / 0.20 = 0.5
// Position value: 100000 * 0.5 = 50000
// Position size: 50000 / 100 = 500 shares
expect(result).toBe(500);
});
it('should cap leverage at 2x', () => {
const params: VolatilityParams = {
price: 100,
volatility: 0.05,
targetVolatility: 0.2,
lookbackDays: 30,
};
const result = volatilityTargetPositionSize(params, 100000);
// Volatility ratio would be 4, but capped at 2
// Position value: 100000 * 2 = 200000
// Position size: 200000 / 100 = 2000 shares
expect(result).toBe(2000);
});
});
describe('equalWeightPositionSize', () => {
it('should calculate equal weight position size', () => {
const result = equalWeightPositionSize(100000, 5, 100);
// Position value per asset: 100000 / 5 = 20000
// Position size: 20000 / 100 = 200 shares
expect(result).toBe(200);
});
it('should return 0 for invalid inputs', () => {
expect(equalWeightPositionSize(100000, 0, 100)).toBe(0);
expect(equalWeightPositionSize(100000, 5, 0)).toBe(0);
});
});
describe('atrBasedPositionSize', () => {
it('should calculate ATR-based position size', () => {
const result = atrBasedPositionSize(100000, 2, 5, 2, 100);
// Risk amount: 100000 * 0.02 = 2000
// Stop distance: 5 * 2 = 10
// Position size: 2000 / 10 = 200 shares
expect(result).toBe(200);
});
it('should return 0 for zero ATR', () => {
const result = atrBasedPositionSize(100000, 2, 0, 2, 100);
expect(result).toBe(0);
});
});
describe('expectancyPositionSize', () => {
it('should calculate expectancy-based position size', () => {
const result = expectancyPositionSize(100000, 0.6, 150, -100, 5);
// Expectancy: 0.6 * 150 - 0.4 * 100 = 90 - 40 = 50
// Expectancy ratio: 50 / 100 = 0.5
// Risk percentage: min(0.5 * 0.5, 5) = min(0.25, 5) = 0.25
// Position: 100000 * 0.0025 = 250
expect(result).toBe(250);
});
it('should return 0 for negative expectancy', () => {
const result = expectancyPositionSize(100000, 0.3, 100, -200);
expect(result).toBe(0);
});
});
describe('correlationAdjustedPositionSize', () => {
it('should adjust position size based on correlation', () => {
const existingPositions = [
{ size: 1000, correlation: 0.5 },
{ size: 500, correlation: 0.3 },
];
const result = correlationAdjustedPositionSize(1000, existingPositions, 0.5);
// Should reduce position size based on correlation risk
expect(result).toBeLessThan(1000);
expect(result).toBeGreaterThan(0);
});
it('should return original size when no existing positions', () => {
const result = correlationAdjustedPositionSize(1000, [], 0.5);
expect(result).toBe(1000);
});
});
describe('calculatePortfolioHeat', () => {
it('should calculate portfolio heat correctly', () => {
const positions = [
{ value: 10000, risk: 500 },
{ value: 15000, risk: 750 },
{ value: 20000, risk: 1000 },
];
const result = calculatePortfolioHeat(positions, 100000);
// Total risk: 500 + 750 + 1000 = 2250
// Heat: (2250 / 100000) * 100 = 2.25%
expect(result).toBe(2.25);
});
it('should handle empty positions array', () => {
const result = calculatePortfolioHeat([], 100000);
expect(result).toBe(0);
});
it('should cap heat at 100%', () => {
const positions = [{ value: 50000, risk: 150000 }];
const result = calculatePortfolioHeat(positions, 100000);
expect(result).toBe(100);
});
});
describe('dynamicPositionSize', () => {
it('should adjust position size based on market conditions', () => {
const result = dynamicPositionSize(1000, 0.25, 0.15, 0.05, 0.1);
// Volatility adjustment: 0.15 / 0.25 = 0.6
// Drawdown adjustment: 1 - (0.05 / 0.10) = 0.5
// Adjusted size: 1000 * 0.6 * 0.5 = 300
expect(result).toBe(300);
});
it('should handle high drawdown', () => {
const result = dynamicPositionSize(1000, 0.2, 0.15, 0.15, 0.1);
// Should significantly reduce position size due to high drawdown
expect(result).toBeLessThan(500);
});
});
describe('liquidityConstrainedPositionSize', () => {
it('should constrain position size based on liquidity', () => {
const result = liquidityConstrainedPositionSize(1000, 10000, 0.05, 100);
// Max shares: 10000 * 0.05 = 500
// Should return min(1000, 500) = 500
expect(result).toBe(500);
});
it('should return desired size when liquidity allows', () => {
const result = liquidityConstrainedPositionSize(500, 20000, 0.05, 100);
// Max shares: 20000 * 0.05 = 1000
// Should return min(500, 1000) = 500
expect(result).toBe(500);
});
});
describe('multiTimeframePositionSize', () => {
it('should weight signals correctly', () => {
const result = multiTimeframePositionSize(100000, 0.8, 0.6, 0.4, 2);
// Weighted signal: 0.8 * 0.2 + 0.6 * 0.3 + 0.4 * 0.5 = 0.16 + 0.18 + 0.2 = 0.54
// Adjusted risk: 2 * 0.54 = 1.08%
// Position: 100000 * 0.0108 = 1080
expect(result).toBe(1080);
});
it('should clamp signals to valid range', () => {
const result = multiTimeframePositionSize(100000, 2, -2, 1.5, 2);
// Signals should be clamped to [-1, 1]
// Weighted: 1 * 0.2 + (-1) * 0.3 + 1 * 0.5 = 0.2 - 0.3 + 0.5 = 0.4
// Adjusted risk: 2 * 0.4 = 0.8%
expect(result).toBe(800);
});
});
describe('riskParityPositionSize', () => {
it('should allocate based on inverse volatility', () => {
const assets = [
{ volatility: 0.1, price: 100 },
{ volatility: 0.2, price: 200 },
];
const result = riskParityPositionSize(assets, 0.15, 100000);
// Asset 1: 1/0.10 = 10, Asset 2: 1/0.20 = 5
// Total inverse vol: 15
// Weights: Asset 1: 10/15 = 0.667, Asset 2: 5/15 = 0.333
expect(result).toHaveLength(2);
expect(result[0]).toBeGreaterThan(result[1]);
});
it('should handle zero volatility assets', () => {
const assets = [
{ volatility: 0, price: 100 },
{ volatility: 0.2, price: 200 },
];
const result = riskParityPositionSize(assets, 0.15, 100000);
expect(result[0]).toBe(0);
expect(result[1]).toBeGreaterThan(0);
});
});
describe('sharpeOptimizedPositionSize', () => {
it('should calculate position size based on Sharpe optimization', () => {
const result = sharpeOptimizedPositionSize(100000, 0.15, 0.2, 0.02, 3);
// Kelly formula for continuous returns: f = (μ - r) / σ²
// Expected return: 0.15, Risk-free: 0.02, Volatility: 0.20
// f = (0.15 - 0.02) / (0.20)² = 0.13 / 0.04 = 3.25
// But capped at maxLeverage=3, so should be 3.0
// Final position: 100000 * 3 = 300000
expect(result).toBe(300000);
});
it('should return 0 for invalid inputs', () => {
// Invalid volatility
expect(sharpeOptimizedPositionSize(100000, 0.15, 0, 0.02)).toBe(0);
// Invalid account size
expect(sharpeOptimizedPositionSize(0, 0.15, 0.2, 0.02)).toBe(0);
// Expected return less than risk-free rate
expect(sharpeOptimizedPositionSize(100000, 0.01, 0.2, 0.02)).toBe(0);
});
it('should respect maximum leverage', () => {
const result = sharpeOptimizedPositionSize(100000, 0.3, 0.2, 0.02, 2);
// Kelly fraction would be (0.30 - 0.02) / (0.20)² = 7, but capped at 2
// Position: 100000 * 2 = 200000
expect(result).toBe(200000);
});
});
describe('validatePositionSize', () => {
it('should validate position size against limits', () => {
const result = validatePositionSize(500, 100, 100000, 10, 2);
// Position value: 500 * 100 = 50000 (50% of account)
// This exceeds 10% limit
expect(result.isValid).toBe(false);
expect(result.violations).toContain('Position exceeds maximum 10% of account');
expect(result.adjustedSize).toBe(100); // 10000 / 100
});
it('should pass validation for reasonable position', () => {
const result = validatePositionSize(50, 100, 100000, 10, 2);
// Position value: 50 * 100 = 5000 (5% of account)
expect(result.isValid).toBe(true);
expect(result.violations).toHaveLength(0);
expect(result.adjustedSize).toBe(50);
});
it('should handle fractional shares', () => {
const result = validatePositionSize(0.5, 100, 100000, 10, 2);
expect(result.isValid).toBe(false);
expect(result.violations).toContain('Position size too small (less than 1 share)');
expect(result.adjustedSize).toBe(0);
});
});
});

View file

@ -1,80 +1,80 @@
import { describe, it, expect } from 'bun:test';
import { dateUtils } from '../src/dateUtils';
describe('dateUtils', () => {
describe('isTradingDay', () => {
it('should return true for weekdays (Monday-Friday)', () => {
// Monday (June 2, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 2))).toBe(true);
// Tuesday (June 3, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 3))).toBe(true);
// Wednesday (June 4, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 4))).toBe(true);
// Thursday (June 5, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 5))).toBe(true);
// Friday (June 6, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 6))).toBe(true);
});
it('should return false for weekends (Saturday-Sunday)', () => {
// Saturday (June 7, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 7))).toBe(false);
// Sunday (June 8, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 8))).toBe(false);
});
});
describe('getNextTradingDay', () => {
it('should return the next day when current day is a weekday and next day is a weekday', () => {
// Monday -> Tuesday
const monday = new Date(2025, 5, 2);
const tuesday = new Date(2025, 5, 3);
expect(dateUtils.getNextTradingDay(monday).toDateString()).toBe(tuesday.toDateString());
});
it('should skip weekends when getting next trading day', () => {
// Friday -> Monday
const friday = new Date(2025, 5, 6);
const monday = new Date(2025, 5, 9);
expect(dateUtils.getNextTradingDay(friday).toDateString()).toBe(monday.toDateString());
});
it('should handle weekends as input correctly', () => {
// Saturday -> Monday
const saturday = new Date(2025, 5, 7);
const monday = new Date(2025, 5, 9);
expect(dateUtils.getNextTradingDay(saturday).toDateString()).toBe(monday.toDateString());
// Sunday -> Monday
const sunday = new Date(2025, 5, 8);
expect(dateUtils.getNextTradingDay(sunday).toDateString()).toBe(monday.toDateString());
});
});
describe('getPreviousTradingDay', () => {
it('should return the previous day when current day is a weekday and previous day is a weekday', () => {
// Tuesday -> Monday
const tuesday = new Date(2025, 5, 3);
const monday = new Date(2025, 5, 2);
expect(dateUtils.getPreviousTradingDay(tuesday).toDateString()).toBe(monday.toDateString());
});
it('should skip weekends when getting previous trading day', () => {
// Monday -> Friday
const monday = new Date(2025, 5, 9);
const friday = new Date(2025, 5, 6);
expect(dateUtils.getPreviousTradingDay(monday).toDateString()).toBe(friday.toDateString());
});
it('should handle weekends as input correctly', () => {
// Saturday -> Friday
const saturday = new Date(2025, 5, 7);
const friday = new Date(2025, 5, 6);
expect(dateUtils.getPreviousTradingDay(saturday).toDateString()).toBe(friday.toDateString());
// Sunday -> Friday
const sunday = new Date(2025, 5, 8);
expect(dateUtils.getPreviousTradingDay(sunday).toDateString()).toBe(friday.toDateString());
});
});
});
import { describe, expect, it } from 'bun:test';
import { dateUtils } from '../src/dateUtils';
describe('dateUtils', () => {
describe('isTradingDay', () => {
it('should return true for weekdays (Monday-Friday)', () => {
// Monday (June 2, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 2))).toBe(true);
// Tuesday (June 3, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 3))).toBe(true);
// Wednesday (June 4, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 4))).toBe(true);
// Thursday (June 5, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 5))).toBe(true);
// Friday (June 6, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 6))).toBe(true);
});
it('should return false for weekends (Saturday-Sunday)', () => {
// Saturday (June 7, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 7))).toBe(false);
// Sunday (June 8, 2025)
expect(dateUtils.isTradingDay(new Date(2025, 5, 8))).toBe(false);
});
});
describe('getNextTradingDay', () => {
it('should return the next day when current day is a weekday and next day is a weekday', () => {
// Monday -> Tuesday
const monday = new Date(2025, 5, 2);
const tuesday = new Date(2025, 5, 3);
expect(dateUtils.getNextTradingDay(monday).toDateString()).toBe(tuesday.toDateString());
});
it('should skip weekends when getting next trading day', () => {
// Friday -> Monday
const friday = new Date(2025, 5, 6);
const monday = new Date(2025, 5, 9);
expect(dateUtils.getNextTradingDay(friday).toDateString()).toBe(monday.toDateString());
});
it('should handle weekends as input correctly', () => {
// Saturday -> Monday
const saturday = new Date(2025, 5, 7);
const monday = new Date(2025, 5, 9);
expect(dateUtils.getNextTradingDay(saturday).toDateString()).toBe(monday.toDateString());
// Sunday -> Monday
const sunday = new Date(2025, 5, 8);
expect(dateUtils.getNextTradingDay(sunday).toDateString()).toBe(monday.toDateString());
});
});
describe('getPreviousTradingDay', () => {
it('should return the previous day when current day is a weekday and previous day is a weekday', () => {
// Tuesday -> Monday
const tuesday = new Date(2025, 5, 3);
const monday = new Date(2025, 5, 2);
expect(dateUtils.getPreviousTradingDay(tuesday).toDateString()).toBe(monday.toDateString());
});
it('should skip weekends when getting previous trading day', () => {
// Monday -> Friday
const monday = new Date(2025, 5, 9);
const friday = new Date(2025, 5, 6);
expect(dateUtils.getPreviousTradingDay(monday).toDateString()).toBe(friday.toDateString());
});
it('should handle weekends as input correctly', () => {
// Saturday -> Friday
const saturday = new Date(2025, 5, 7);
const friday = new Date(2025, 5, 6);
expect(dateUtils.getPreviousTradingDay(saturday).toDateString()).toBe(friday.toDateString());
// Sunday -> Friday
const sunday = new Date(2025, 5, 8);
expect(dateUtils.getPreviousTradingDay(sunday).toDateString()).toBe(friday.toDateString());
});
});
});

View file

@ -1,393 +1,395 @@
import { getLogger } from '@stock-bot/logger';
import { DataFrame } from '@stock-bot/data-frame';
import { atr, sma, ema, rsi, macd, bollingerBands } from '@stock-bot/utils';
// Vector operations interface
export interface VectorOperation {
name: string;
inputs: string[];
output: string;
operation: (inputs: number[][]) => number[];
}
// Vectorized strategy context
export interface VectorizedContext {
data: DataFrame;
lookback: number;
indicators: Record<string, number[]>;
signals: Record<string, number[]>;
}
// Performance metrics for vectorized backtesting
export interface VectorizedMetrics {
totalReturns: number;
sharpeRatio: number;
maxDrawdown: number;
winRate: number;
profitFactor: number;
totalTrades: number;
avgTrade: number;
returns: number[];
drawdown: number[];
equity: number[];
}
// Vectorized backtest result
export interface VectorizedBacktestResult {
metrics: VectorizedMetrics;
trades: VectorizedTrade[];
equity: number[];
timestamps: number[];
signals: Record<string, number[]>;
}
export interface VectorizedTrade {
entryIndex: number;
exitIndex: number;
entryPrice: number;
exitPrice: number;
quantity: number;
side: 'LONG' | 'SHORT';
pnl: number;
return: number;
duration: number;
}
// Vectorized strategy engine
export class VectorEngine {
private logger = getLogger('vector-engine');
private operations: Map<string, VectorOperation> = new Map();
constructor() {
this.registerDefaultOperations();
}
private registerDefaultOperations(): void {
// Register common mathematical operations
this.registerOperation({
name: 'add',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => val + b[i])
});
this.registerOperation({
name: 'subtract',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => val - b[i])
});
this.registerOperation({
name: 'multiply',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => val * b[i])
});
this.registerOperation({
name: 'divide',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => b[i] !== 0 ? val / b[i] : NaN)
});
// Register comparison operations
this.registerOperation({
name: 'greater_than',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => val > b[i] ? 1 : 0)
});
this.registerOperation({
name: 'less_than',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => val < b[i] ? 1 : 0)
});
this.registerOperation({
name: 'crossover',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => {
const result = new Array(a.length).fill(0);
for (let i = 1; i < a.length; i++) {
if (a[i] > b[i] && a[i - 1] <= b[i - 1]) {
result[i] = 1;
}
}
return result;
}
});
this.registerOperation({
name: 'crossunder',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => {
const result = new Array(a.length).fill(0);
for (let i = 1; i < a.length; i++) {
if (a[i] < b[i] && a[i - 1] >= b[i - 1]) {
result[i] = 1;
}
}
return result;
}
});
}
registerOperation(operation: VectorOperation): void {
this.operations.set(operation.name, operation);
this.logger.debug(`Registered operation: ${operation.name}`);
}
// Execute vectorized strategy
async executeVectorizedStrategy(
data: DataFrame,
strategyCode: string
): Promise<VectorizedBacktestResult> {
try {
const context = this.prepareContext(data);
const signals = this.executeStrategy(context, strategyCode);
const trades = this.generateTrades(data, signals);
const metrics = this.calculateMetrics(data, trades);
return {
metrics,
trades,
equity: metrics.equity,
timestamps: data.getColumn('timestamp'),
signals
};
} catch (error) {
this.logger.error('Vectorized strategy execution failed', error);
throw error;
}
}
private prepareContext(data: DataFrame): VectorizedContext {
const close = data.getColumn('close');
const high = data.getColumn('high');
const low = data.getColumn('low');
const volume = data.getColumn('volume');
// Calculate common indicators
const indicators: Record<string, number[]> = {
sma_20: sma(close, 20),
sma_50: sma(close, 50),
ema_12: ema(close, 12),
ema_26: ema(close, 26),
rsi: rsi(close),
};
const m = macd(close);
indicators.macd = m.macd;
indicators.macd_signal = m.signal;
indicators.macd_histogram = m.histogram;
const bb = bollingerBands(close);
indicators.bb_upper = bb.upper;
indicators.bb_middle = bb.middle;
indicators.bb_lower = bb.lower;
return {
data,
lookback: 100,
indicators,
signals: {}
};
}
private executeStrategy(context: VectorizedContext, strategyCode: string): Record<string, number[]> {
// This is a simplified strategy execution
// In production, you'd want a more sophisticated strategy compiler/interpreter
const signals: Record<string, number[]> = {
buy: new Array(context.data.length).fill(0),
sell: new Array(context.data.length).fill(0)
};
// Example: Simple moving average crossover strategy
if (strategyCode.includes('sma_crossover')) {
const sma20 = context.indicators.sma_20;
const sma50 = context.indicators.sma_50;
for (let i = 1; i < sma20.length; i++) {
// Buy signal: SMA20 crosses above SMA50
if (!isNaN(sma20[i]) && !isNaN(sma50[i]) &&
!isNaN(sma20[i-1]) && !isNaN(sma50[i-1])) {
if (sma20[i] > sma50[i] && sma20[i-1] <= sma50[i-1]) {
signals.buy[i] = 1;
}
// Sell signal: SMA20 crosses below SMA50
else if (sma20[i] < sma50[i] && sma20[i-1] >= sma50[i-1]) {
signals.sell[i] = 1;
}
}
}
}
return signals;
}
private generateTrades(data: DataFrame, signals: Record<string, number[]>): VectorizedTrade[] {
const trades: VectorizedTrade[] = [];
const close = data.getColumn('close');
const timestamps = data.getColumn('timestamp');
let position: { index: number; price: number; side: 'LONG' | 'SHORT' } | null = null;
for (let i = 0; i < close.length; i++) {
if (signals.buy[i] === 1 && !position) {
// Open long position
position = {
index: i,
price: close[i],
side: 'LONG'
};
} else if (signals.sell[i] === 1) {
if (position && position.side === 'LONG') {
// Close long position
const trade: VectorizedTrade = {
entryIndex: position.index,
exitIndex: i,
entryPrice: position.price,
exitPrice: close[i],
quantity: 1, // Simplified: always trade 1 unit
side: 'LONG',
pnl: close[i] - position.price,
return: (close[i] - position.price) / position.price,
duration: timestamps[i] - timestamps[position.index]
};
trades.push(trade);
position = null;
} else if (!position) {
// Open short position
position = {
index: i,
price: close[i],
side: 'SHORT'
};
}
} else if (signals.buy[i] === 1 && position && position.side === 'SHORT') {
// Close short position
const trade: VectorizedTrade = {
entryIndex: position.index,
exitIndex: i,
entryPrice: position.price,
exitPrice: close[i],
quantity: 1,
side: 'SHORT',
pnl: position.price - close[i],
return: (position.price - close[i]) / position.price,
duration: timestamps[i] - timestamps[position.index]
};
trades.push(trade);
position = null;
}
}
return trades;
}
private calculateMetrics(data: DataFrame, trades: VectorizedTrade[]): VectorizedMetrics {
if (trades.length === 0) {
return {
totalReturns: 0,
sharpeRatio: 0,
maxDrawdown: 0,
winRate: 0,
profitFactor: 0,
totalTrades: 0,
avgTrade: 0,
returns: [],
drawdown: [],
equity: []
};
}
const returns = trades.map(t => t.return);
const pnls = trades.map(t => t.pnl);
// Calculate equity curve
const equity: number[] = [10000]; // Starting capital
let currentEquity = 10000;
for (const trade of trades) {
currentEquity += trade.pnl;
equity.push(currentEquity);
}
// Calculate drawdown
const drawdown: number[] = [];
let peak = equity[0];
for (const eq of equity) {
if (eq > peak) peak = eq;
drawdown.push((peak - eq) / peak);
}
const totalReturns = (equity[equity.length - 1] - equity[0]) / equity[0];
const avgReturn = returns.reduce((sum, r) => sum + r, 0) / returns.length;
const returnStd = Math.sqrt(
returns.reduce((sum, r) => sum + Math.pow(r - avgReturn, 2), 0) / returns.length
);
const winningTrades = trades.filter(t => t.pnl > 0);
const losingTrades = trades.filter(t => t.pnl < 0);
const grossProfit = winningTrades.reduce((sum, t) => sum + t.pnl, 0);
const grossLoss = Math.abs(losingTrades.reduce((sum, t) => sum + t.pnl, 0));
return {
totalReturns,
sharpeRatio: returnStd !== 0 ? (avgReturn / returnStd) * Math.sqrt(252) : 0,
maxDrawdown: Math.max(...drawdown),
winRate: winningTrades.length / trades.length,
profitFactor: grossLoss !== 0 ? grossProfit / grossLoss : Infinity,
totalTrades: trades.length,
avgTrade: pnls.reduce((sum, pnl) => sum + pnl, 0) / trades.length,
returns,
drawdown,
equity
};
}
// Utility methods for vectorized operations
applyOperation(operationName: string, inputs: Record<string, number[]>): number[] {
const operation = this.operations.get(operationName);
if (!operation) {
throw new Error(`Operation '${operationName}' not found`);
}
const inputArrays = operation.inputs.map(inputName => {
if (!inputs[inputName]) {
throw new Error(`Input '${inputName}' not provided for operation '${operationName}'`);
}
return inputs[inputName];
});
return operation.operation(inputArrays);
}
// Batch processing for multiple strategies
async batchBacktest(
data: DataFrame,
strategies: Array<{ id: string; code: string }>
): Promise<Record<string, VectorizedBacktestResult>> {
const results: Record<string, VectorizedBacktestResult> = {};
for (const strategy of strategies) {
try {
this.logger.info(`Running vectorized backtest for strategy: ${strategy.id}`);
results[strategy.id] = await this.executeVectorizedStrategy(data, strategy.code);
} catch (error) {
this.logger.error(`Backtest failed for strategy: ${strategy.id}`, error);
// Continue with other strategies
}
}
return results;
}
}
import { DataFrame } from '@stock-bot/data-frame';
import { getLogger } from '@stock-bot/logger';
import { atr, bollingerBands, ema, macd, rsi, sma } from '@stock-bot/utils';
// Vector operations interface
export interface VectorOperation {
name: string;
inputs: string[];
output: string;
operation: (inputs: number[][]) => number[];
}
// Vectorized strategy context
export interface VectorizedContext {
data: DataFrame;
lookback: number;
indicators: Record<string, number[]>;
signals: Record<string, number[]>;
}
// Performance metrics for vectorized backtesting
export interface VectorizedMetrics {
totalReturns: number;
sharpeRatio: number;
maxDrawdown: number;
winRate: number;
profitFactor: number;
totalTrades: number;
avgTrade: number;
returns: number[];
drawdown: number[];
equity: number[];
}
// Vectorized backtest result
export interface VectorizedBacktestResult {
metrics: VectorizedMetrics;
trades: VectorizedTrade[];
equity: number[];
timestamps: number[];
signals: Record<string, number[]>;
}
export interface VectorizedTrade {
entryIndex: number;
exitIndex: number;
entryPrice: number;
exitPrice: number;
quantity: number;
side: 'LONG' | 'SHORT';
pnl: number;
return: number;
duration: number;
}
// Vectorized strategy engine
export class VectorEngine {
private logger = getLogger('vector-engine');
private operations: Map<string, VectorOperation> = new Map();
constructor() {
this.registerDefaultOperations();
}
private registerDefaultOperations(): void {
// Register common mathematical operations
this.registerOperation({
name: 'add',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => val + b[i]),
});
this.registerOperation({
name: 'subtract',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => val - b[i]),
});
this.registerOperation({
name: 'multiply',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => val * b[i]),
});
this.registerOperation({
name: 'divide',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => (b[i] !== 0 ? val / b[i] : NaN)),
});
// Register comparison operations
this.registerOperation({
name: 'greater_than',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => (val > b[i] ? 1 : 0)),
});
this.registerOperation({
name: 'less_than',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => a.map((val, i) => (val < b[i] ? 1 : 0)),
});
this.registerOperation({
name: 'crossover',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => {
const result = new Array(a.length).fill(0);
for (let i = 1; i < a.length; i++) {
if (a[i] > b[i] && a[i - 1] <= b[i - 1]) {
result[i] = 1;
}
}
return result;
},
});
this.registerOperation({
name: 'crossunder',
inputs: ['a', 'b'],
output: 'result',
operation: ([a, b]) => {
const result = new Array(a.length).fill(0);
for (let i = 1; i < a.length; i++) {
if (a[i] < b[i] && a[i - 1] >= b[i - 1]) {
result[i] = 1;
}
}
return result;
},
});
}
registerOperation(operation: VectorOperation): void {
this.operations.set(operation.name, operation);
this.logger.debug(`Registered operation: ${operation.name}`);
}
// Execute vectorized strategy
async executeVectorizedStrategy(
data: DataFrame,
strategyCode: string
): Promise<VectorizedBacktestResult> {
try {
const context = this.prepareContext(data);
const signals = this.executeStrategy(context, strategyCode);
const trades = this.generateTrades(data, signals);
const metrics = this.calculateMetrics(data, trades);
return {
metrics,
trades,
equity: metrics.equity,
timestamps: data.getColumn('timestamp'),
signals,
};
} catch (error) {
this.logger.error('Vectorized strategy execution failed', error);
throw error;
}
}
private prepareContext(data: DataFrame): VectorizedContext {
const close = data.getColumn('close');
const high = data.getColumn('high');
const low = data.getColumn('low');
const volume = data.getColumn('volume');
// Calculate common indicators
const indicators: Record<string, number[]> = {
sma_20: sma(close, 20),
sma_50: sma(close, 50),
ema_12: ema(close, 12),
ema_26: ema(close, 26),
rsi: rsi(close),
};
const m = macd(close);
indicators.macd = m.macd;
indicators.macd_signal = m.signal;
indicators.macd_histogram = m.histogram;
const bb = bollingerBands(close);
indicators.bb_upper = bb.upper;
indicators.bb_middle = bb.middle;
indicators.bb_lower = bb.lower;
return {
data,
lookback: 100,
indicators,
signals: {},
};
}
private executeStrategy(
context: VectorizedContext,
strategyCode: string
): Record<string, number[]> {
// This is a simplified strategy execution
// In production, you'd want a more sophisticated strategy compiler/interpreter
const signals: Record<string, number[]> = {
buy: new Array(context.data.length).fill(0),
sell: new Array(context.data.length).fill(0),
};
// Example: Simple moving average crossover strategy
if (strategyCode.includes('sma_crossover')) {
const sma20 = context.indicators.sma_20;
const sma50 = context.indicators.sma_50;
for (let i = 1; i < sma20.length; i++) {
// Buy signal: SMA20 crosses above SMA50
if (!isNaN(sma20[i]) && !isNaN(sma50[i]) && !isNaN(sma20[i - 1]) && !isNaN(sma50[i - 1])) {
if (sma20[i] > sma50[i] && sma20[i - 1] <= sma50[i - 1]) {
signals.buy[i] = 1;
}
// Sell signal: SMA20 crosses below SMA50
else if (sma20[i] < sma50[i] && sma20[i - 1] >= sma50[i - 1]) {
signals.sell[i] = 1;
}
}
}
}
return signals;
}
private generateTrades(data: DataFrame, signals: Record<string, number[]>): VectorizedTrade[] {
const trades: VectorizedTrade[] = [];
const close = data.getColumn('close');
const timestamps = data.getColumn('timestamp');
let position: { index: number; price: number; side: 'LONG' | 'SHORT' } | null = null;
for (let i = 0; i < close.length; i++) {
if (signals.buy[i] === 1 && !position) {
// Open long position
position = {
index: i,
price: close[i],
side: 'LONG',
};
} else if (signals.sell[i] === 1) {
if (position && position.side === 'LONG') {
// Close long position
const trade: VectorizedTrade = {
entryIndex: position.index,
exitIndex: i,
entryPrice: position.price,
exitPrice: close[i],
quantity: 1, // Simplified: always trade 1 unit
side: 'LONG',
pnl: close[i] - position.price,
return: (close[i] - position.price) / position.price,
duration: timestamps[i] - timestamps[position.index],
};
trades.push(trade);
position = null;
} else if (!position) {
// Open short position
position = {
index: i,
price: close[i],
side: 'SHORT',
};
}
} else if (signals.buy[i] === 1 && position && position.side === 'SHORT') {
// Close short position
const trade: VectorizedTrade = {
entryIndex: position.index,
exitIndex: i,
entryPrice: position.price,
exitPrice: close[i],
quantity: 1,
side: 'SHORT',
pnl: position.price - close[i],
return: (position.price - close[i]) / position.price,
duration: timestamps[i] - timestamps[position.index],
};
trades.push(trade);
position = null;
}
}
return trades;
}
private calculateMetrics(data: DataFrame, trades: VectorizedTrade[]): VectorizedMetrics {
if (trades.length === 0) {
return {
totalReturns: 0,
sharpeRatio: 0,
maxDrawdown: 0,
winRate: 0,
profitFactor: 0,
totalTrades: 0,
avgTrade: 0,
returns: [],
drawdown: [],
equity: [],
};
}
const returns = trades.map(t => t.return);
const pnls = trades.map(t => t.pnl);
// Calculate equity curve
const equity: number[] = [10000]; // Starting capital
let currentEquity = 10000;
for (const trade of trades) {
currentEquity += trade.pnl;
equity.push(currentEquity);
}
// Calculate drawdown
const drawdown: number[] = [];
let peak = equity[0];
for (const eq of equity) {
if (eq > peak) peak = eq;
drawdown.push((peak - eq) / peak);
}
const totalReturns = (equity[equity.length - 1] - equity[0]) / equity[0];
const avgReturn = returns.reduce((sum, r) => sum + r, 0) / returns.length;
const returnStd = Math.sqrt(
returns.reduce((sum, r) => sum + Math.pow(r - avgReturn, 2), 0) / returns.length
);
const winningTrades = trades.filter(t => t.pnl > 0);
const losingTrades = trades.filter(t => t.pnl < 0);
const grossProfit = winningTrades.reduce((sum, t) => sum + t.pnl, 0);
const grossLoss = Math.abs(losingTrades.reduce((sum, t) => sum + t.pnl, 0));
return {
totalReturns,
sharpeRatio: returnStd !== 0 ? (avgReturn / returnStd) * Math.sqrt(252) : 0,
maxDrawdown: Math.max(...drawdown),
winRate: winningTrades.length / trades.length,
profitFactor: grossLoss !== 0 ? grossProfit / grossLoss : Infinity,
totalTrades: trades.length,
avgTrade: pnls.reduce((sum, pnl) => sum + pnl, 0) / trades.length,
returns,
drawdown,
equity,
};
}
// Utility methods for vectorized operations
applyOperation(operationName: string, inputs: Record<string, number[]>): number[] {
const operation = this.operations.get(operationName);
if (!operation) {
throw new Error(`Operation '${operationName}' not found`);
}
const inputArrays = operation.inputs.map(inputName => {
if (!inputs[inputName]) {
throw new Error(`Input '${inputName}' not provided for operation '${operationName}'`);
}
return inputs[inputName];
});
return operation.operation(inputArrays);
}
// Batch processing for multiple strategies
async batchBacktest(
data: DataFrame,
strategies: Array<{ id: string; code: string }>
): Promise<Record<string, VectorizedBacktestResult>> {
const results: Record<string, VectorizedBacktestResult> = {};
for (const strategy of strategies) {
try {
this.logger.info(`Running vectorized backtest for strategy: ${strategy.id}`);
results[strategy.id] = await this.executeVectorizedStrategy(data, strategy.code);
} catch (error) {
this.logger.error(`Backtest failed for strategy: ${strategy.id}`, error);
// Continue with other strategies
}
}
return results;
}
}