diff --git a/libs/utils/src/calculations/correlation-analysis.ts b/libs/utils/src/calculations/correlation-analysis.ts
index 794b854..214a9bd 100644
--- a/libs/utils/src/calculations/correlation-analysis.ts
+++ b/libs/utils/src/calculations/correlation-analysis.ts
@@ -645,19 +645,81 @@ function erf(x: number): number {
 }
 
 function betaIncomplete(a: number, b: number, x: number): number {
-  // Simplified beta incomplete function
+  // Better approximation of incomplete beta function
   if (x === 0) return 0;
   if (x === 1) return 1;
 
-  // Use continued fraction approximation
-  let result = 0;
-  for (let i = 0; i < 100; i++) {
-    const term = Math.pow(x, a + i) * Math.pow(1 - x, b) / (a + i);
-    result += term;
-    if (Math.abs(term) < 1e-10) break;
+  // Use continued fraction approximation (Lentz's algorithm)
+  const fpmin = 1e-30;
+  const maxIter = 200;
+  const eps = 3e-7;
+
+  const bt = Math.exp(
+    gammaLn(a + b) - gammaLn(a) - gammaLn(b) +
+    a * Math.log(x) + b * Math.log(1 - x)
+  );
+
+  if (x < (a + 1) / (a + b + 2)) {
+    return bt * betaContinuedFraction(a, b, x) / a;
+  } else {
+    return 1 - bt * betaContinuedFraction(b, a, 1 - x) / b;
   }
-  return result;
 
+  function betaContinuedFraction(a: number, b: number, x: number): number {
+    let c = 1;
+    let d = 1 - (a + b) * x / (a + 1);
+    if (Math.abs(d) < fpmin) d = fpmin;
+    d = 1 / d;
+    let h = d;
+
+    for (let m = 1; m <= maxIter; m++) {
+      const m2 = 2 * m;
+      const aa = m * (b - m) * x / ((a + m2 - 1) * (a + m2));
+      d = 1 + aa * d;
+      if (Math.abs(d) < fpmin) d = fpmin;
+      c = 1 + aa / c;
+      if (Math.abs(c) < fpmin) c = fpmin;
+      d = 1 / d;
+      h *= d * c;
+
+      const bb = -(a + m) * (a + b + m) * x / ((a + m2) * (a + m2 + 1));
+      d = 1 + bb * d;
+      if (Math.abs(d) < fpmin) d = fpmin;
+      c = 1 + bb / c;
+      if (Math.abs(c) < fpmin) c = fpmin;
+      d = 1 / d;
+      const del = d * c;
+      h *= del;
+
+      if (Math.abs(del - 1) < eps) break;
+    }
+
+    return h;
+  }
+
+  function gammaLn(xx: number): number {
+    const stp = 2.50662827465;
+    const coeffs = [
+      76.18009172947146,
+      -86.50532032941677,
+      24.01409824083091,
+      -1.231739572450155,
+      0.1208650973866179e-2,
+      -0.5395239384953e-5
+    ];
+
+    let x = xx - 1;
+    let tmp = x + 5.5;
+    tmp -= (x + 0.5) * Math.log(tmp);
+    let ser = 1.000000000190015;
+
+    for (let j = 0; j < 6; j++) {
+      x += 1;
+      ser += coeffs[j] / x;
+    }
+
+    return -tmp + Math.log(stp * ser);
+  }
 }
 
 function eigenDecomposition(matrix: number[][]): { eigenvalues: number[]; eigenvectors: number[][] } {
diff --git a/libs/utils/src/calculations/market-statistics.ts b/libs/utils/src/calculations/market-statistics.ts
index 02f2958..f513897 100644
--- a/libs/utils/src/calculations/market-statistics.ts
+++ b/libs/utils/src/calculations/market-statistics.ts
@@ -208,14 +208,13 @@ export function identifyMarketRegime(
   const recentData = ohlcv.slice(-lookbackPeriod);
   const prices = recentData.map(candle => candle.close);
   const volumes = recentData.map(candle => candle.volume);
-
-  // returns and volatility
+  // returns and volatility
   const returns = [];
   for (let i = 1; i < prices.length; i++) {
     returns.push((prices[i] - prices[i - 1]) / prices[i - 1]);
   }
 
-  const volatility = Volatility(returns);
+  const volatility = calculateVolatility(returns);
   const averageVolume = volumes.reduce((sum, vol) => sum + vol, 0) / volumes.length;
 
   // Trend analysis
@@ -319,11 +318,10 @@ export function IntradayPatterns(
 
   for (let hour = 0; hour < 24; hour++) {
     const data = hourlyData[hour];
-
-    hourlyReturns[hour] = data.returns.length > 0 ?
+    hourlyReturns[hour] = data.returns.length > 0 ?
       data.returns.reduce((sum, ret) => sum + ret, 0) / data.returns.length : 0;
 
-    hourlyVolatility[hour] = Volatility(data.returns);
+    hourlyVolatility[hour] = calculateVolatility(data.returns);
     hourlyVolume[hour] = data.volumes.length > 0 ?
       data.volumes.reduce((sum, vol) => sum + vol, 0) / data.volumes.length : 0;
 
@@ -374,21 +372,20 @@ export function PriceDiscovery(
     returns1.push((prices1[i] - prices1[i - 1]) / prices1[i - 1]);
     returns2.push((prices2[i] - prices2[i - 1]) / prices2[i - 1]);
   }
-
-  // correlations with lags
-  const correlation0 = Correlation(returns1, returns2);
+  // correlations with lags
+  const correlation0 = calculateCorrelation(returns1, returns2);
   const correlation1 = returns1.length > 1 ?
-    Correlation(returns1.slice(1), returns2.slice(0, -1)) : 0;
+    calculateCorrelation(returns1.slice(1), returns2.slice(0, -1)) : 0;
   const correlationMinus1 = returns1.length > 1 ?
-    Correlation(returns1.slice(0, -1), returns2.slice(1)) : 0;
+    calculateCorrelation(returns1.slice(0, -1), returns2.slice(1)) : 0;
 
   // Price lead-lag (simplified)
   const priceLeadLag = correlation1 - correlationMinus1;
 
   // Information shares (simplified Hasbrouck methodology)
-  const variance1 = Variance(returns1);
-  const variance2 = Variance(returns2);
-  const covariance = Covariance(returns1, returns2);
+  const variance1 = calculateVariance(returns1);
+  const variance2 = calculateVariance(returns2);
+  const covariance = calculateCovariance(returns1, returns2);
   const totalVariance = variance1 + variance2 + 2 * covariance;
   const informationShare1 = totalVariance > 0 ?
     (variance1 + covariance) / totalVariance : 0.5;
@@ -436,14 +433,13 @@ export function MarketStress(
     returns.push((recentData[i].close - recentData[i - 1].close) / recentData[i - 1].close);
     volumes.push(recentData[i].volume);
   }
-
-  // Volatility stress
-  const volatility = Volatility(returns);
+  // Volatility stress
+  const volatility = calculateVolatility(returns);
   const volatilityStress = Math.min(1, volatility / 0.05); // Normalize to 5% daily vol
 
   // Liquidity stress (volume-based)
   const averageVolume = volumes.reduce((sum, vol) => sum + vol, 0) / volumes.length;
-  const volumeVariability = Volatility(volumes.map(vol => vol / averageVolume));
+  const volumeVariability = calculateVolatility(volumes.map(vol => vol / averageVolume));
   const liquidityStress = Math.min(1, volumeVariability);
 
   // Correlation stress (simplified - would need multiple assets)
@@ -542,7 +538,7 @@ export function ImplementationShortfall(
 
 // Helper functions
 
-function Volatility(returns: number[]): number {
+function calculateVolatility(returns: number[]): number {
   if (returns.length < 2) return 0;
 
   const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
@@ -551,7 +547,7 @@
   return Math.sqrt(variance);
 }
 
-function Correlation(x: number[], y: number[]): number {
+function calculateCorrelation(x: number[], y: number[]): number {
   if (x.length !== y.length || x.length < 2) return 0;
 
   const n = x.length;
diff --git a/libs/utils/src/calculations/position-sizing.ts b/libs/utils/src/calculations/position-sizing.ts
index 7b9dfff..cf68c82 100644
--- a/libs/utils/src/calculations/position-sizing.ts
+++ b/libs/utils/src/calculations/position-sizing.ts
@@ -220,18 +214,14 @@
 ): number {
   // Input validation
   if (volatility <= 0 || accountSize <= 0 || expectedReturn <= riskFreeRate || maxLeverage <= 0) return 0;
-
-  // Kelly criterion with Sharpe ratio optimization
+  // Kelly criterion with Sharpe ratio optimization
   const excessReturn = expectedReturn - riskFreeRate;
   const kellyFraction = excessReturn / (volatility * volatility);
 
-  // Apply maximum leverage constraint and ensure reasonable bounds
+  // Apply maximum leverage constraint
   const constrainedFraction = Math.max(0, Math.min(kellyFraction, maxLeverage));
 
-  // Further cap at 100% of account for safety
-  const finalFraction = Math.min(constrainedFraction, 1);
-
-  return accountSize * finalFraction;
+  return accountSize * constrainedFraction;
 }
 
 /**
@@ -405,14 +401,12 @@ export function riskParityPositionSize(
 
   return assets.map(asset => {
     if (asset.volatility === 0 || asset.price === 0) return 0;
-
-    // Calculate weight based on inverse volatility
+    // Calculate weight based on inverse volatility
     const weight = (1 / asset.volatility) / totalInverseVol;
 
-    // Scale by target risk
-    const riskAdjustedWeight = weight * (targetRisk / asset.volatility);
-
-    const positionValue = accountSize * riskAdjustedWeight;
+    // The weight itself already accounts for risk parity
+    // We just need to scale by target risk once
+    const positionValue = accountSize * weight * targetRisk;
     return Math.floor(positionValue / asset.price);
   });
 }
diff --git a/libs/utils/src/calculations/risk-metrics.ts b/libs/utils/src/calculations/risk-metrics.ts
index dce7812..344ed4e 100644
--- a/libs/utils/src/calculations/risk-metrics.ts
+++ b/libs/utils/src/calculations/risk-metrics.ts
@@ -359,9 +359,10 @@ export function calculateRiskMetrics(
 
 /**
  * Helper function to get Z-score for confidence level
+ * This implementation handles arbitrary confidence levels
  */
 function getZScore(confidenceLevel: number): number {
-  // Approximate Z-scores for common confidence levels
+  // First check our lookup table for common values (more precise)
   const zScores: { [key: string]: number } = {
     '0.90': 1.282,
     '0.95': 1.645,
@@ -371,7 +372,17 @@
   };
 
   const key = confidenceLevel.toString();
-  return zScores[key] || 1.645; // Default to 95% confidence
+  if (zScores[key]) return zScores[key];
+
+  // For arbitrary confidence levels, use approximation
+  if (confidenceLevel < 0.5) return -getZScore(1 - confidenceLevel);
+
+  if (confidenceLevel >= 0.999) return 3.09; // Cap at 99.9% for numerical stability
+
+  // Approximation of inverse normal CDF
+  const y = Math.sqrt(-2.0 * Math.log(1.0 - confidenceLevel));
+  return y - (2.515517 + 0.802853 * y + 0.010328 * y * y) /
+    (1.0 + 1.432788 * y + 0.189269 * y * y + 0.001308 * y * y * y);
 }
 
 /**
diff --git a/libs/utils/test/calculations/position-sizing.test.ts b/libs/utils/test/calculations/position-sizing.test.ts
index cca493e..3500285 100644
--- a/libs/utils/test/calculations/position-sizing.test.ts
+++ b/libs/utils/test/calculations/position-sizing.test.ts
@@ -340,6 +340,38 @@
     });
   });
 
+  describe('sharpeOptimizedPositionSize', () => {
+    it('should calculate position size based on Sharpe optimization', () => {
+      const result = sharpeOptimizedPositionSize(100000, 0.15, 0.20, 0.02, 3);
+
+      // Kelly formula for continuous returns: f = (μ - r) / σ²
+      // Expected return: 0.15, Risk-free: 0.02, Volatility: 0.20
+      // f = (0.15 - 0.02) / (0.20)² = 0.13 / 0.04 = 3.25
+      // But capped at maxLeverage=3, so should be 3.0
+      // Final position: 100000 * 3 = 300000
+      expect(result).toBe(300000);
+    });
+
+    it('should return 0 for invalid inputs', () => {
+      // Invalid volatility
+      expect(sharpeOptimizedPositionSize(100000, 0.15, 0, 0.02)).toBe(0);
+
+      // Invalid account size
+      expect(sharpeOptimizedPositionSize(0, 0.15, 0.20, 0.02)).toBe(0);
+
+      // Expected return less than risk-free rate
+      expect(sharpeOptimizedPositionSize(100000, 0.01, 0.20, 0.02)).toBe(0);
+    });
+
+    it('should respect maximum leverage', () => {
+      const result = sharpeOptimizedPositionSize(100000, 0.30, 0.20, 0.02, 2);
+
+      // Kelly fraction would be (0.30 - 0.02) / (0.20)² = 7, but capped at 2
+      // Position: 100000 * 2 = 200000
+      expect(result).toBe(200000);
+    });
+  });
+
   describe('validatePositionSize', () => {
     it('should validate position size against limits', () => {
       const result = validatePositionSize(500, 100, 100000, 10, 2);