work on calculations

This commit is contained in:
Bojan Kucera 2025-06-04 18:16:16 -04:00
parent 3d910a13e0
commit ab7ef2b678
20 changed files with 1343 additions and 222 deletions

View file

@ -30,7 +30,7 @@ export function logReturn(initialPrice: number, finalPrice: number): number {
/**
* Calculate compound annual growth rate (CAGR)
*/
export function calculateCAGR(startValue: number, endValue: number, years: number): number {
/**
 * Compound annual growth rate: the constant yearly rate that would grow
 * startValue into endValue over the given number of years.
 *
 * @param startValue Initial value; must be positive.
 * @param endValue   Final value; must be positive.
 * @param years      Holding period in years; must be positive.
 * @returns The annualized growth rate, or 0 for any non-positive input.
 */
export function cagr(startValue: number, endValue: number, years: number): number {
  const inputsValid = years > 0 && startValue > 0 && endValue > 0;
  if (!inputsValid) return 0;
  const growthFactor = endValue / startValue;
  return Math.pow(growthFactor, 1 / years) - 1;
}

View file

@ -329,8 +329,8 @@ export function partialCorrelation(
}
// Calculate residuals for x and y after regressing on controls
const xResiduals = calculateResiduals(x, X);
const yResiduals = calculateResiduals(y, X);
const xResiduals = residuals(x, X);
const yResiduals = residuals(y, X);
return pearsonCorrelation(xResiduals, yResiduals);
}
@ -568,7 +568,7 @@ export function grangerCausalityTest(
// Find optimal lag
for (let lag = 1; lag <= maxLag; lag++) {
const aic = calculateVARModel(x, y, lag).aic;
const aic = varModel(x, y, lag).aic;
if (aic < minAIC) {
minAIC = aic;
bestLag = lag;
@ -576,14 +576,14 @@ export function grangerCausalityTest(
}
// Test x -> y causality
const fullModel = calculateVARModel(x, y, bestLag);
const restrictedModelY = calculateARModel(y, bestLag);
const fullModel = varModel(x, y, bestLag);
const restrictedModelY = arModel(y, bestLag);
const fStatX = ((restrictedModelY.rss - fullModel.rssY) / bestLag) / (fullModel.rssY / (x.length - 2 * bestLag - 1));
const pValueX = 1 - fCDF(fStatX, bestLag, x.length - 2 * bestLag - 1);
// Test y -> x causality
const restrictedModelX = calculateARModel(x, bestLag);
const restrictedModelX = arModel(x, bestLag);
const fStatY = ((restrictedModelX.rss - fullModel.rssX) / bestLag) / (fullModel.rssX / (x.length - 2 * bestLag - 1));
const pValueY = 1 - fCDF(fStatY, bestLag, x.length - 2 * bestLag - 1);
@ -699,7 +699,7 @@ function eigenDecomposition(matrix: number[][]): { eigenvalues: number[]; eigenv
return { eigenvalues, eigenvectors };
}
function calculateResiduals(y: number[], X: number[][]): number[] {
function residuals(y: number[], X: number[][]): number[] {
// Simple linear regression to calculate residuals
const n = y.length;
const k = X[0].length;
@ -768,7 +768,7 @@ function solveLinearSystem(A: number[][], b: number[]): number[] {
return x;
}
function calculateVARModel(x: number[], y: number[], lag: number): {
function varModel(x: number[], y: number[], lag: number): {
rssX: number;
rssY: number;
aic: number;
@ -792,8 +792,8 @@ function calculateVARModel(x: number[], y: number[], lag: number): {
}
// Calculate residuals for both equations
const residualsX = calculateResiduals(yX, X);
const residualsY = calculateResiduals(yY, X);
const residualsX = residuals(yX, X);
const residualsY = residuals(yY, X);
const rssX = residualsX.reduce((sum, r) => sum + r * r, 0);
const rssY = residualsY.reduce((sum, r) => sum + r * r, 0);
@ -804,7 +804,7 @@ function calculateVARModel(x: number[], y: number[], lag: number): {
return { rssX, rssY, aic };
}
function calculateARModel(y: number[], lag: number): { rss: number } {
function arModel(y: number[], lag: number): { rss: number } {
const n = y.length - lag;
// Build design matrix
@ -819,8 +819,8 @@ function calculateARModel(y: number[], lag: number): { rss: number } {
}
}
const residuals = calculateResiduals(yVec, X);
const rss = residuals.reduce((sum, r) => sum + r * r, 0);
const res = residuals(yVec, X);
const rss = res.reduce((sum, r) => sum + r * r, 0);
return { rss };
}

View file

@ -43,6 +43,12 @@ export interface RiskMetrics {
downside_deviation: number;
calmar_ratio: number;
sortino_ratio: number;
beta: number;
alpha: number;
sharpeRatio: number;
treynorRatio: number;
trackingError: number;
informationRatio: number;
}
export interface TechnicalIndicators {
@ -59,7 +65,36 @@ export interface TechnicalIndicators {
roc: number[];
}
// Export interfaces from all modules
// Additional interfaces for new functionality
/**
 * A single executed trade round-trip (entry and exit).
 */
export interface TradeExecution {
  // Entry fill price.
  entry: number;
  // Exit fill price.
  exit: number;
  // Presumably the extreme prices observed while the position was open — TODO confirm with producer.
  peak?: number;
  trough?: number;
  // Traded quantity.
  volume: number;
  // Time of the execution.
  timestamp: Date;
}

/**
 * One market observation: last price, traded volume, and optional
 * top-of-book quote data (bid/ask and their sizes).
 */
export interface MarketData {
  price: number;
  volume: number;
  timestamp: Date;
  bid?: number;
  ask?: number;
  bidSize?: number;
  askSize?: number;
}

/**
 * Aggregated output of a backtest run: the trades taken, the resulting
 * equity curve, and the derived performance/risk metrics.
 */
export interface BacktestResults {
  trades: TradeExecution[];
  equityCurve: Array<{ value: number; date: Date }>;
  performance: PortfolioMetrics;
  riskMetrics: RiskMetrics;
  // NOTE(review): `any` loses type safety — the concrete drawdown type lives in performance-metrics; consider importing it.
  drawdownAnalysis: any; // Import from performance-metrics
}
// Export all calculation functions
export * from './basic-calculations';
export * from './technical-indicators';
export * from './risk-metrics';
@ -70,3 +105,62 @@ export * from './performance-metrics';
export * from './market-statistics';
export * from './volatility-models';
export * from './correlation-analysis';
// Import specific functions for convenience functions
import {
sma, ema, rsi, macd, bollingerBands, atr, stochastic,
williamsR, cci, momentum, roc
} from './technical-indicators';
import { calculateRiskMetrics } from './risk-metrics';
import { calculateStrategyMetrics } from './performance-metrics';
// Convenience function to calculate all technical indicators at once
/**
 * Computes the full TechnicalIndicators bundle for an OHLCV series in one call.
 *
 * @param ohlcv   Candle series to analyze.
 * @param periods Optional lookback overrides; defaults are sma 20, ema 20,
 *                rsi 14, atr 14. Indicators without an override here use
 *                their own library defaults.
 * @returns All indicator series keyed per the TechnicalIndicators interface.
 */
export function calculateAllTechnicalIndicators(
  ohlcv: OHLCVData[],
  periods: { sma?: number; ema?: number; rsi?: number; atr?: number } = {}
): TechnicalIndicators {
  const smaPeriod = periods.sma ?? 20;
  const emaPeriod = periods.ema ?? 20;
  const rsiPeriod = periods.rsi ?? 14;
  const atrPeriod = periods.atr ?? 14;

  // Close-only indicators share the extracted close series.
  const closes = ohlcv.map((candle) => candle.close);

  return {
    sma: sma(closes, smaPeriod),
    ema: ema(closes, emaPeriod),
    rsi: rsi(closes, rsiPeriod),
    macd: macd(closes),
    bollinger: bollingerBands(closes),
    atr: atr(ohlcv, atrPeriod),
    stochastic: stochastic(ohlcv),
    williams_r: williamsR(ohlcv),
    cci: cci(ohlcv),
    momentum: momentum(closes),
    roc: roc(closes)
  };
}
// Convenience function for comprehensive portfolio analysis
/**
 * One-stop portfolio analysis: strategy performance metrics plus risk metrics.
 *
 * @param returns          Periodic portfolio returns.
 * @param equityCurve      Portfolio value over time.
 * @param benchmarkReturns Optional benchmark return series for relative metrics.
 * @param riskFreeRate     Annual risk-free rate (default 2%).
 * @returns performance and risk metric bundles; `trades`/`drawdown` are
 *          declared optional and are not populated by this function.
 */
export function analyzePortfolio(
  returns: number[],
  equityCurve: Array<{ value: number; date: Date }>,
  benchmarkReturns?: number[],
  riskFreeRate: number = 0.02
): {
  performance: PortfolioMetrics;
  risk: RiskMetrics;
  trades?: any;
  drawdown?: any;
} {
  const equityValues = equityCurve.map((point) => point.value);
  const performance = calculateStrategyMetrics(equityCurve, benchmarkReturns, riskFreeRate);
  const risk = calculateRiskMetrics(returns, equityValues, benchmarkReturns, riskFreeRate);
  return { performance, risk };
}

View file

@ -52,9 +52,9 @@ export interface MarketRegime {
}
/**
* Calculate Volume Weighted Average Price (VWAP)
* Volume Weighted Average Price (VWAP)
*/
export function calculateVWAP(ohlcv: OHLCVData[]): number[] {
export function VWAP(ohlcv: OHLCVData[]): number[] {
if (ohlcv.length === 0) return [];
const vwap: number[] = [];
@ -73,9 +73,9 @@ export function calculateVWAP(ohlcv: OHLCVData[]): number[] {
}
/**
* Calculate Time Weighted Average Price (TWAP)
* Time Weighted Average Price (TWAP)
*/
export function calculateTWAP(prices: number[], timeWeights?: number[]): number {
export function TWAP(prices: number[], timeWeights?: number[]): number {
if (prices.length === 0) return 0;
if (!timeWeights) {
@ -93,9 +93,9 @@ export function calculateTWAP(prices: number[], timeWeights?: number[]): number
}
/**
* Calculate market impact of trades
* market impact of trades
*/
export function calculateMarketImpact(
export function MarketImpact(
trades: Array<{ price: number; volume: number; side: 'buy' | 'sell'; timestamp: Date }>,
benchmarkPrice: number
): {
@ -138,9 +138,9 @@ export function calculateMarketImpact(
}
/**
* Calculate liquidity metrics
* liquidity metrics
*/
export function calculateLiquidityMetrics(
export function LiquidityMetrics(
ohlcv: OHLCVData[],
bidPrices: number[],
askPrices: number[],
@ -209,13 +209,13 @@ export function identifyMarketRegime(
const prices = recentData.map(candle => candle.close);
const volumes = recentData.map(candle => candle.volume);
// Calculate returns and volatility
// returns and volatility
const returns = [];
for (let i = 1; i < prices.length; i++) {
returns.push((prices[i] - prices[i - 1]) / prices[i - 1]);
}
const volatility = calculateVolatility(returns);
const volatility = Volatility(returns);
const averageVolume = volumes.reduce((sum, vol) => sum + vol, 0) / volumes.length;
// Trend analysis
@ -258,9 +258,9 @@ export function identifyMarketRegime(
}
/**
* Calculate order book imbalance
* order book imbalance
*/
export function calculateOrderBookImbalance(
export function OrderBookImbalance(
bidPrices: number[],
askPrices: number[],
bidSizes: number[],
@ -285,9 +285,9 @@ export function calculateOrderBookImbalance(
}
/**
* Calculate intraday patterns
* intraday patterns
*/
export function calculateIntradayPatterns(
export function IntradayPatterns(
ohlcv: OHLCVData[]
): {
hourlyReturns: { [hour: number]: number };
@ -312,7 +312,7 @@ export function calculateIntradayPatterns(
hourlyData[hour].volumes.push(ohlcv[i].volume);
}
// Calculate statistics for each hour
// statistics for each hour
const hourlyReturns: { [hour: number]: number } = {};
const hourlyVolatility: { [hour: number]: number } = {};
const hourlyVolume: { [hour: number]: number } = {};
@ -323,13 +323,13 @@ export function calculateIntradayPatterns(
hourlyReturns[hour] = data.returns.length > 0 ?
data.returns.reduce((sum, ret) => sum + ret, 0) / data.returns.length : 0;
hourlyVolatility[hour] = calculateVolatility(data.returns);
hourlyVolatility[hour] = Volatility(data.returns);
hourlyVolume[hour] = data.volumes.length > 0 ?
data.volumes.reduce((sum, vol) => sum + vol, 0) / data.volumes.length : 0;
}
// Calculate opening gap and closing drift
// opening gap and closing drift
const openingGap = ohlcv.length > 1 ?
(ohlcv[0].open - ohlcv[0].close) / ohlcv[0].close : 0;
@ -346,9 +346,9 @@ export function calculateIntradayPatterns(
}
/**
* Calculate price discovery metrics
* price discovery metrics
*/
export function calculatePriceDiscovery(
export function PriceDiscovery(
prices1: number[], // Prices from market 1
prices2: number[] // Prices from market 2
): {
@ -366,7 +366,7 @@ export function calculatePriceDiscovery(
};
}
// Calculate returns
// returns
const returns1 = [];
const returns2 = [];
@ -375,20 +375,20 @@ export function calculatePriceDiscovery(
returns2.push((prices2[i] - prices2[i - 1]) / prices2[i - 1]);
}
// Calculate correlations with lags
const correlation0 = calculateCorrelation(returns1, returns2);
// correlations with lags
const correlation0 = Correlation(returns1, returns2);
const correlation1 = returns1.length > 1 ?
calculateCorrelation(returns1.slice(1), returns2.slice(0, -1)) : 0;
Correlation(returns1.slice(1), returns2.slice(0, -1)) : 0;
const correlationMinus1 = returns1.length > 1 ?
calculateCorrelation(returns1.slice(0, -1), returns2.slice(1)) : 0;
Correlation(returns1.slice(0, -1), returns2.slice(1)) : 0;
// Price lead-lag (simplified)
const priceLeadLag = correlation1 - correlationMinus1;
// Information shares (simplified Hasbrouck methodology)
const variance1 = calculateVariance(returns1);
const variance2 = calculateVariance(returns2);
const covariance = calculateCovariance(returns1, returns2);
const variance1 = Variance(returns1);
const variance2 = Variance(returns2);
const covariance = Covariance(returns1, returns2);
const totalVariance = variance1 + variance2 + 2 * covariance;
const informationShare1 = totalVariance > 0 ? (variance1 + covariance) / totalVariance : 0.5;
@ -406,9 +406,9 @@ export function calculatePriceDiscovery(
}
/**
* Calculate market stress indicators
* market stress indicators
*/
export function calculateMarketStress(
export function MarketStress(
ohlcv: OHLCVData[],
lookbackPeriod: number = 20
): {
@ -438,12 +438,12 @@ export function calculateMarketStress(
}
// Volatility stress
const volatility = calculateVolatility(returns);
const volatility = Volatility(returns);
const volatilityStress = Math.min(1, volatility / 0.05); // Normalize to 5% daily vol
// Liquidity stress (volume-based)
const averageVolume = volumes.reduce((sum, vol) => sum + vol, 0) / volumes.length;
const volumeVariability = calculateVolatility(volumes.map(vol => vol / averageVolume));
const volumeVariability = Volatility(volumes.map(vol => vol / averageVolume));
const liquidityStress = Math.min(1, volumeVariability);
// Correlation stress (simplified - would need multiple assets)
@ -467,9 +467,82 @@ export function calculateMarketStress(
};
}
/**
* realized spread
*/
/**
 * Average realized (effective) spread across a set of trades, measured
 * against the quote midpoint: 2*(trade − mid) for buys, 2*(mid − trade)
 * for sells.
 *
 * NOTE(review): every trade is compared against midPrices[0]; trades are
 * not matched to the contemporaneous mid by timestamp, and `timeWindow`
 * is currently unused — confirm whether timestamp matching is intended.
 *
 * @returns Mean realized spread, or 0 when either input series is empty.
 */
export function RealizedSpread(
  trades: Array<{ price: number; side: 'buy' | 'sell'; timestamp: Date }>,
  midPrices: number[],
  timeWindow: number = 5 // minutes
): number {
  if (trades.length === 0 || midPrices.length === 0) return 0;

  const mid = midPrices[0]; // Simplified - should match by timestamp
  const spreads = trades.map((trade) =>
    trade.side === 'buy' ? 2 * (trade.price - mid) : 2 * (mid - trade.price)
  );
  const total = spreads.reduce((sum, spread) => sum + spread, 0);
  return spreads.length > 0 ? total / spreads.length : 0;
}
/**
* implementation shortfall
*/
/**
 * Implementation-shortfall decomposition of an order execution (Perold-style):
 * per-unit shortfall split into delay, market-impact, timing and commission
 * components.
 *
 * @param decisionPrice   Price at the moment the trading decision was made.
 * @param executionPrices Fill prices, one entry per execution.
 * @param volumes         Fill sizes, parallel to executionPrices.
 * @param commissions     Commission paid per fill (absolute amounts).
 * @param marketImpact    Per-unit market-impact estimate per fill.
 * @returns Per-unit shortfall components and their total.
 * @throws Error when the parallel arrays disagree in length or the total
 *         executed volume is not positive.
 */
export function ImplementationShortfall(
  decisionPrice: number,
  executionPrices: number[],
  volumes: number[],
  commissions: number[],
  marketImpact: number[]
): {
  totalShortfall: number;
  delayComponent: number;
  marketImpactComponent: number;
  timingComponent: number;
  commissionComponent: number;
} {
  if (executionPrices.length !== volumes.length) {
    throw new Error('Execution prices and volumes must have same length');
  }
  // Fix: the original indexed marketImpact per fill without checking its
  // length, silently producing NaN components on a mismatch.
  if (marketImpact.length !== volumes.length) {
    throw new Error('Market impact and volumes must have same length');
  }

  const totalVolume = volumes.reduce((sum, vol) => sum + vol, 0);
  // Fix: guard against division by zero — empty fills or zero total volume
  // previously yielded NaN for every component.
  if (totalVolume <= 0) {
    throw new Error('Total executed volume must be positive');
  }

  const weightedExecutionPrice =
    executionPrices.reduce((sum, price, i) => sum + price * volumes[i], 0) / totalVolume;
  const totalCommissions = commissions.reduce((sum, comm) => sum + comm, 0);
  const totalMarketImpact = marketImpact.reduce((sum, impact, i) => sum + impact * volumes[i], 0);

  const delayComponent = weightedExecutionPrice - decisionPrice;
  const marketImpactComponent = totalMarketImpact / totalVolume;
  const timingComponent = 0; // Simplified - would need benchmark price evolution
  const commissionComponent = totalCommissions / totalVolume;

  const totalShortfall =
    delayComponent + marketImpactComponent + timingComponent + commissionComponent;

  return {
    totalShortfall,
    delayComponent,
    marketImpactComponent,
    timingComponent,
    commissionComponent
  };
}
// Helper functions
function calculateVolatility(returns: number[]): number {
function Volatility(returns: number[]): number {
if (returns.length < 2) return 0;
const mean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
@ -478,7 +551,7 @@ function calculateVolatility(returns: number[]): number {
return Math.sqrt(variance);
}
function calculateCorrelation(x: number[], y: number[]): number {
function Correlation(x: number[], y: number[]): number {
if (x.length !== y.length || x.length < 2) return 0;
const n = x.length;
@ -503,14 +576,14 @@ function calculateCorrelation(x: number[], y: number[]): number {
return denominator > 0 ? numerator / denominator : 0;
}
function calculateVariance(values: number[]): number {
/**
 * Sample variance with the (n − 1) denominator.
 * Returns 0 when fewer than two values are provided.
 */
function Variance(values: number[]): number {
  const n = values.length;
  if (n < 2) return 0;

  let total = 0;
  for (const value of values) total += value;
  const mean = total / n;

  let squaredDeviations = 0;
  for (const value of values) squaredDeviations += Math.pow(value - mean, 2);
  return squaredDeviations / (n - 1);
}
function calculateCovariance(x: number[], y: number[]): number {
function Covariance(x: number[], y: number[]): number {
if (x.length !== y.length || x.length < 2) return 0;
const n = x.length;

View file

@ -30,13 +30,15 @@ export interface VolatilityParams {
export function fixedRiskPositionSize(params: PositionSizeParams): number {
const { accountSize, riskPercentage, entryPrice, stopLoss, leverage = 1 } = params;
// Input validation
if (accountSize <= 0 || riskPercentage <= 0 || entryPrice <= 0 || leverage <= 0) return 0;
if (entryPrice === stopLoss) return 0;
const riskAmount = accountSize * (riskPercentage / 100);
const riskPerShare = Math.abs(entryPrice - stopLoss);
const basePositionSize = riskAmount / riskPerShare;
return basePositionSize * leverage;
return Math.floor(basePositionSize * leverage);
}
/**
@ -45,17 +47,18 @@ export function fixedRiskPositionSize(params: PositionSizeParams): number {
export function kellyPositionSize(params: KellyParams, accountSize: number): number {
const { winRate, averageWin, averageLoss } = params;
if (averageLoss === 0 || winRate === 0 || winRate === 1) return 0;
// Validate inputs
if (averageLoss === 0 || winRate <= 0 || winRate >= 1 || averageWin <= 0) return 0;
const lossRate = 1 - winRate;
const winLossRatio = averageWin / Math.abs(averageLoss);
// Kelly formula: f = (bp - q) / b
// Correct Kelly formula: f = (bp - q) / b
// where: b = win/loss ratio, p = win rate, q = loss rate
const kellyFraction = (winLossRatio * winRate - lossRate) / winLossRatio;
const kellyFraction = (winRate * winLossRatio - lossRate) / winLossRatio;
// Cap Kelly fraction to prevent over-leveraging
const cappedKelly = Math.max(0, Math.min(kellyFraction, 0.25));
// Cap Kelly fraction to prevent over-leveraging (max 25% of Kelly recommendation)
const cappedKelly = Math.max(0, Math.min(kellyFraction * 0.25, 0.25));
return accountSize * cappedKelly;
}
@ -68,6 +71,9 @@ export function fractionalKellyPositionSize(
accountSize: number,
fraction: number = 0.25
): number {
// Input validation
if (fraction <= 0 || fraction > 1) return 0;
const fullKelly = kellyPositionSize(params, accountSize);
return fullKelly * fraction;
}
@ -78,12 +84,13 @@ export function fractionalKellyPositionSize(
export function volatilityTargetPositionSize(params: VolatilityParams, accountSize: number): number {
const { price, volatility, targetVolatility } = params;
if (volatility === 0 || price === 0) return 0;
// Input validation
if (volatility <= 0 || price <= 0 || targetVolatility <= 0 || accountSize <= 0) return 0;
const volatilityRatio = targetVolatility / volatility;
const basePositionValue = accountSize * volatilityRatio;
const basePositionValue = accountSize * Math.min(volatilityRatio, 2); // Cap at 2x leverage
return basePositionValue / price;
return Math.floor(basePositionValue / price);
}
/**
@ -94,10 +101,11 @@ export function equalWeightPositionSize(
numberOfPositions: number,
price: number
): number {
if (numberOfPositions === 0 || price === 0) return 0;
// Input validation
if (numberOfPositions <= 0 || price <= 0 || accountSize <= 0) return 0;
const positionValue = accountSize / numberOfPositions;
return positionValue / price;
return Math.floor(positionValue / price);
}
/**
@ -114,8 +122,10 @@ export function atrBasedPositionSize(
const riskAmount = accountSize * (riskPercentage / 100);
const stopDistance = atrValue * atrMultiplier;
const positionSize = riskAmount / stopDistance;
return riskAmount / stopDistance;
// Return position size in shares, not dollars
return Math.floor(positionSize);
}
/**
@ -128,15 +138,20 @@ export function expectancyPositionSize(
averageLoss: number,
maxRiskPercentage: number = 2
): number {
// Input validation
if (accountSize <= 0 || winRate <= 0 || winRate >= 1 || averageWin <= 0 || averageLoss === 0) return 0;
const expectancy = (winRate * averageWin) - ((1 - winRate) * Math.abs(averageLoss));
if (expectancy <= 0) return 0;
// Scale position size based on expectancy
// Scale position size based on expectancy relative to average loss
// Higher expectancy relative to risk allows for larger position
const expectancyRatio = expectancy / Math.abs(averageLoss);
const riskPercentage = Math.min(expectancyRatio * 0.5, maxRiskPercentage);
return accountSize * (riskPercentage / 100);
const positionValue = accountSize * (riskPercentage / 100);
return positionValue;
}
/**
@ -151,28 +166,46 @@ export function monteCarloPositionSize(
if (historicalReturns.length === 0) return 0;
const outcomes: number[] = [];
const mean = historicalReturns.reduce((sum, ret) => sum + ret, 0) / historicalReturns.length;
const variance = historicalReturns.reduce((sum, ret) => sum + Math.pow(ret - mean, 2), 0) / historicalReturns.length;
const stdDev = Math.sqrt(variance);
for (let i = 0; i < simulations; i++) {
let portfolioValue = accountSize;
// Test different position sizes (as fraction of account)
const testFractions = [0.01, 0.025, 0.05, 0.075, 0.1, 0.15, 0.2, 0.25];
let optimalFraction = 0;
let bestSharpe = -Infinity;
for (const fraction of testFractions) {
const simOutcomes: number[] = [];
// Simulate a series of trades
for (let j = 0; j < 252; j++) { // One year of trading days
const randomReturn = historicalReturns[Math.floor(Math.random() * historicalReturns.length)];
portfolioValue *= (1 + randomReturn);
for (let i = 0; i < simulations; i++) {
let portfolioValue = accountSize;
// Simulate trades over a period
for (let j = 0; j < 50; j++) { // 50 trades
const randomReturn = historicalReturns[Math.floor(Math.random() * historicalReturns.length)];
const positionReturn = randomReturn * fraction;
portfolioValue = portfolioValue * (1 + positionReturn);
}
simOutcomes.push(portfolioValue);
}
outcomes.push(portfolioValue);
// Calculate Sharpe ratio for this fraction
const avgOutcome = simOutcomes.reduce((sum, val) => sum + val, 0) / simOutcomes.length;
const returns = simOutcomes.map(val => (val - accountSize) / accountSize);
const avgReturn = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
const returnStdDev = Math.sqrt(returns.reduce((sum, ret) => sum + Math.pow(ret - avgReturn, 2), 0) / returns.length);
const sharpe = returnStdDev > 0 ? avgReturn / returnStdDev : -Infinity;
if (sharpe > bestSharpe) {
bestSharpe = sharpe;
optimalFraction = fraction;
}
}
outcomes.sort((a, b) => a - b);
const worstCaseIndex = Math.floor((1 - confidenceLevel) * outcomes.length);
const worstCaseValue = outcomes[worstCaseIndex];
// Calculate safe position size based on worst-case scenario
const maxLoss = accountSize - worstCaseValue;
const safePositionRatio = Math.min(0.1, accountSize / (maxLoss * 10));
return accountSize * safePositionRatio;
return accountSize * optimalFraction;
}
/**
@ -185,15 +218,57 @@ export function sharpeOptimizedPositionSize(
riskFreeRate: number = 0.02,
maxLeverage: number = 3
): number {
if (volatility === 0) return 0;
// Input validation
if (volatility <= 0 || accountSize <= 0 || expectedReturn <= riskFreeRate || maxLeverage <= 0) return 0;
// Kelly criterion with Sharpe ratio optimization
const excessReturn = expectedReturn - riskFreeRate;
const sharpeRatio = excessReturn / volatility;
const kellyFraction = excessReturn / (volatility * volatility);
// Optimal leverage based on Sharpe ratio
const optimalLeverage = Math.min(sharpeRatio / volatility, maxLeverage);
// Apply maximum leverage constraint and ensure reasonable bounds
const constrainedFraction = Math.max(0, Math.min(kellyFraction, maxLeverage));
return accountSize * Math.max(0, optimalLeverage);
// Further cap at 100% of account for safety
const finalFraction = Math.min(constrainedFraction, 1);
return accountSize * finalFraction;
}
/**
* Fixed fractional position sizing
*/
/**
 * Fixed fractional position sizing: buys as many whole shares as can be
 * held so that a stop-loss hit risks at most riskPercentage of the account.
 *
 * @param accountSize        Total account equity.
 * @param riskPercentage     Percent of account to risk (e.g. 1 for 1%).
 * @param stopLossPercentage Stop distance as a percent of price.
 * @param price              Entry price per share.
 * @returns Whole-share position size, or 0 for any non-positive input.
 */
export function fixedFractionalPositionSize(
  accountSize: number,
  riskPercentage: number,
  stopLossPercentage: number,
  price: number
): number {
  const valid =
    accountSize > 0 && riskPercentage > 0 && stopLossPercentage > 0 && price > 0;
  if (!valid) return 0;

  const dollarsAtRisk = accountSize * (riskPercentage / 100);
  const riskPerShare = price * (stopLossPercentage / 100);
  return Math.floor(dollarsAtRisk / riskPerShare);
}
/**
* Volatility-adjusted position sizing
*/
/**
 * Volatility-adjusted position sizing: exposure scales with
 * targetVolatility / assetVolatility so a more volatile asset gets a
 * smaller position, with leverage capped at 3x.
 *
 * @param accountSize      Total account equity.
 * @param targetVolatility Desired portfolio volatility contribution.
 * @param assetVolatility  Observed volatility of the asset.
 * @param price            Price per share.
 * @returns Whole-share position size, or 0 for any non-positive input.
 */
export function volatilityAdjustedPositionSize(
  accountSize: number,
  targetVolatility: number,
  assetVolatility: number,
  price: number
): number {
  if (accountSize <= 0 || targetVolatility <= 0 || assetVolatility <= 0 || price <= 0) {
    return 0;
  }

  // Cap at 3x leverage
  const leverage = Math.min(targetVolatility / assetVolatility, 3);
  const positionValue = accountSize * leverage;
  return Math.floor(positionValue / price);
}
/**
@ -204,17 +279,20 @@ export function correlationAdjustedPositionSize(
existingPositions: Array<{ size: number; correlation: number }>,
maxCorrelationRisk: number = 0.3
): number {
if (existingPositions.length === 0) return basePositionSize;
if (existingPositions.length === 0 || basePositionSize <= 0) return basePositionSize;
// Calculate total correlation risk
// Calculate portfolio correlation risk
// This should consider the correlation between the new position and existing ones
const totalCorrelationRisk = existingPositions.reduce((total, position) => {
return total + (position.size * Math.abs(position.correlation));
// Weight correlation by position size relative to new position
const relativeSize = position.size / (basePositionSize + position.size);
return total + (relativeSize * Math.abs(position.correlation));
}, 0);
// Adjust position size based on correlation risk
const correlationAdjustment = Math.max(0, 1 - (totalCorrelationRisk / maxCorrelationRisk));
const correlationAdjustment = Math.max(0.1, 1 - (totalCorrelationRisk / maxCorrelationRisk));
return basePositionSize * correlationAdjustment;
return Math.floor(basePositionSize * correlationAdjustment);
}
/**
@ -224,8 +302,15 @@ export function calculatePortfolioHeat(
positions: Array<{ value: number; risk: number }>,
accountSize: number
): number {
const totalRisk = positions.reduce((sum, position) => sum + position.risk, 0);
return (totalRisk / accountSize) * 100;
// Input validation
if (accountSize <= 0 || positions.length === 0) return 0;
const totalRisk = positions.reduce((sum, position) => {
// Ensure risk values are positive
return sum + Math.max(0, position.risk);
}, 0);
return Math.min((totalRisk / accountSize) * 100, 100); // Cap at 100%
}
/**
@ -238,13 +323,19 @@ export function dynamicPositionSize(
drawdownLevel: number,
maxDrawdownThreshold: number = 0.1
): number {
// Volatility adjustment
const volatilityAdjustment = normalVolatility / Math.max(marketVolatility, 0.01);
// Input validation
if (basePositionSize <= 0 || marketVolatility <= 0 || normalVolatility <= 0) return 0;
if (drawdownLevel < 0 || maxDrawdownThreshold <= 0) return basePositionSize;
// Drawdown adjustment
const drawdownAdjustment = Math.max(0.5, 1 - (drawdownLevel / maxDrawdownThreshold));
// Volatility adjustment - reduce size when volatility is high
const volatilityAdjustment = Math.min(normalVolatility / marketVolatility, 2); // Cap at 2x
return basePositionSize * volatilityAdjustment * drawdownAdjustment;
// Drawdown adjustment - reduce size as drawdown increases
const normalizedDrawdown = Math.min(drawdownLevel / maxDrawdownThreshold, 1);
const drawdownAdjustment = Math.max(0.1, 1 - normalizedDrawdown);
const adjustedSize = basePositionSize * volatilityAdjustment * drawdownAdjustment;
return Math.floor(Math.max(0, adjustedSize));
}
/**
@ -256,11 +347,11 @@ export function liquidityConstrainedPositionSize(
maxVolumePercentage: number = 0.05,
price: number
): number {
const maxShares = (averageDailyVolume * maxVolumePercentage);
const maxPositionValue = maxShares * price;
const desiredPositionValue = desiredPositionSize * price;
if (averageDailyVolume === 0 || price === 0) return 0;
return Math.min(desiredPositionSize, maxPositionValue / price);
const maxShares = averageDailyVolume * maxVolumePercentage;
return Math.min(desiredPositionSize, maxShares);
}
/**
@ -273,11 +364,19 @@ export function multiTimeframePositionSize(
longTermSignal: number, // -1 to 1
baseRiskPercentage: number = 1
): number {
// Input validation
if (accountSize <= 0 || baseRiskPercentage <= 0) return 0;
// Clamp signals to valid range
const clampedShort = Math.max(-1, Math.min(1, shortTermSignal));
const clampedMedium = Math.max(-1, Math.min(1, mediumTermSignal));
const clampedLong = Math.max(-1, Math.min(1, longTermSignal));
// Weight the signals (long-term gets higher weight)
const weightedSignal = (
shortTermSignal * 0.2 +
mediumTermSignal * 0.3 +
longTermSignal * 0.5
clampedShort * 0.2 +
clampedMedium * 0.3 +
clampedLong * 0.5
);
// Adjust risk based on signal strength
@ -294,12 +393,27 @@ export function riskParityPositionSize(
targetRisk: number,
accountSize: number
): number[] {
const totalInverseVol = assets.reduce((sum, asset) => sum + (1 / asset.volatility), 0);
if (assets.length === 0) return [];
// Calculate inverse volatility weights
const totalInverseVol = assets.reduce((sum, asset) => {
if (asset.volatility === 0) return sum;
return sum + (1 / asset.volatility);
}, 0);
if (totalInverseVol === 0) return assets.map(() => 0);
return assets.map(asset => {
if (asset.volatility === 0 || asset.price === 0) return 0;
// Calculate weight based on inverse volatility
const weight = (1 / asset.volatility) / totalInverseVol;
const positionValue = accountSize * weight;
return positionValue / asset.price;
// Scale by target risk
const riskAdjustedWeight = weight * (targetRisk / asset.volatility);
const positionValue = accountSize * riskAdjustedWeight;
return Math.floor(positionValue / asset.price);
});
}

View file

@ -154,20 +154,21 @@ export function informationRatio(portfolioReturns: number[], benchmarkReturns: n
}
/**
* Calculate Beta (systematic risk)
* Calculate beta coefficient
*/
export function beta(portfolioReturns: number[], marketReturns: number[]): number {
if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length < 2) {
return 0;
}
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length;
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length;
const n = portfolioReturns.length;
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / n;
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / n;
let covariance = 0;
let marketVariance = 0;
for (let i = 0; i < portfolioReturns.length; i++) {
for (let i = 0; i < n; i++) {
const portfolioDiff = portfolioReturns[i] - portfolioMean;
const marketDiff = marketReturns[i] - marketMean;
@ -175,26 +176,17 @@ export function beta(portfolioReturns: number[], marketReturns: number[]): numbe
marketVariance += marketDiff * marketDiff;
}
covariance /= (portfolioReturns.length - 1);
marketVariance /= (marketReturns.length - 1);
if (marketVariance === 0) return 0;
return covariance / marketVariance;
return marketVariance === 0 ? 0 : covariance / marketVariance;
}
/**
* Calculate Alpha (excess return over expected return based on beta)
* Calculate alpha
*/
export function alpha(
portfolioReturns: number[],
marketReturns: number[],
riskFreeRate: number = 0
): number {
if (portfolioReturns.length !== marketReturns.length || portfolioReturns.length === 0) {
return 0;
}
const portfolioMean = portfolioReturns.reduce((sum, ret) => sum + ret, 0) / portfolioReturns.length;
const marketMean = marketReturns.reduce((sum, ret) => sum + ret, 0) / marketReturns.length;
const portfolioBeta = beta(portfolioReturns, marketReturns);
@ -298,15 +290,70 @@ export function calculateRiskMetrics(
marketReturns?: number[],
riskFreeRate: number = 0
): RiskMetrics {
return {
var95: valueAtRisk(returns, 0.95),
var99: valueAtRisk(returns, 0.99),
cvar95: conditionalValueAtRisk(returns, 0.95),
maxDrawdown: maxDrawdown(equityCurve),
volatility: volatility(returns),
downside_deviation: downsideDeviation(returns),
calmar_ratio: calmarRatio(returns, equityCurve),
sortino_ratio: sortinoRatio(returns)
if (returns.length === 0) {
return {
var95: 0,
var99: 0,
cvar95: 0,
maxDrawdown: 0,
volatility: 0,
downside_deviation: 0,
calmar_ratio: 0,
sortino_ratio: 0,
beta: 0,
alpha: 0,
sharpeRatio: 0,
treynorRatio: 0,
trackingError: 0,
informationRatio: 0
};
}
const portfolioVolatility = volatility(returns);
const portfolioMean = returns.reduce((sum, ret) => sum + ret, 0) / returns.length;
// Calculate VaR
const var95Value = valueAtRisk(returns, 0.95);
const var99Value = valueAtRisk(returns, 0.99);
const cvar95Value = conditionalValueAtRisk(returns, 0.95);
// Calculate max drawdown
const maxDD = maxDrawdown(equityCurve);
// Calculate downside deviation
const downsideDeviationValue = downsideDeviation(returns);
// Calculate ratios
const calmarRatio = maxDD > 0 ? portfolioMean / maxDD : 0;
const sortinoRatio = downsideDeviationValue > 0 ? (portfolioMean - riskFreeRate) / downsideDeviationValue : 0;
const sharpeRatio = portfolioVolatility > 0 ? (portfolioMean - riskFreeRate) / portfolioVolatility : 0;
let portfolioBeta = 0;
let portfolioAlpha = 0;
let portfolioTreynorRatio = 0;
let portfolioTrackingError = 0;
let informationRatio = 0;
if (marketReturns && marketReturns.length === returns.length) {
portfolioBeta = beta(returns, marketReturns);
portfolioAlpha = alpha(returns, marketReturns, riskFreeRate);
portfolioTreynorRatio = treynorRatio(returns, marketReturns, riskFreeRate);
portfolioTrackingError = trackingError(returns, marketReturns);
informationRatio = portfolioTrackingError > 0 ? portfolioAlpha / portfolioTrackingError : 0;
}
return {
var95: var95Value,
var99: var99Value,
cvar95: cvar95Value,
maxDrawdown: maxDD,
volatility: portfolioVolatility,
downside_deviation: downsideDeviationValue,
calmar_ratio: calmarRatio,
sortino_ratio: sortinoRatio,
beta: portfolioBeta,
alpha: portfolioAlpha,
sharpeRatio,
treynorRatio: portfolioTreynorRatio,
trackingError: portfolioTrackingError,
informationRatio
};
}

View file

@ -253,7 +253,8 @@ export function momentum(prices: number[], period: number = 10): number[] {
const result: number[] = [];
for (let i = period; i < prices.length; i++) {
result.push(prices[i] - prices[i - period]);
const momentum = prices[i] - prices[i - period];
result.push(momentum);
}
return result;
@ -262,7 +263,7 @@ export function momentum(prices: number[], period: number = 10): number[] {
/**
* Rate of Change (ROC)
*/
export function rateOfChange(prices: number[], period: number = 10): number[] {
export function roc(prices: number[], period: number = 10): number[] {
if (period >= prices.length) return [];
const result: number[] = [];
@ -282,32 +283,33 @@ export function rateOfChange(prices: number[], period: number = 10): number[] {
/**
* Money Flow Index (MFI)
*/
export function moneyFlowIndex(ohlcv: OHLCVData[], period: number = 14): number[] {
export function mfi(ohlcv: OHLCVData[], period: number = 14): number[] {
if (period >= ohlcv.length) return [];
const typicalPrices = ohlcv.map(d => (d.high + d.low + d.close) / 3);
const rawMoneyFlows = ohlcv.map((d, i) => typicalPrices[i] * d.volume);
const moneyFlows = ohlcv.map((d, i) => typicalPrices[i] * d.volume);
const result: number[] = [];
for (let i = 1; i < ohlcv.length - period + 1; i++) {
for (let i = period; i < ohlcv.length; i++) {
let positiveFlow = 0;
let negativeFlow = 0;
for (let j = 0; j < period; j++) {
const currentIndex = i + j;
if (typicalPrices[currentIndex] > typicalPrices[currentIndex - 1]) {
positiveFlow += rawMoneyFlows[currentIndex];
} else if (typicalPrices[currentIndex] < typicalPrices[currentIndex - 1]) {
negativeFlow += rawMoneyFlows[currentIndex];
for (let j = i - period + 1; j <= i; j++) {
if (j > 0) {
if (typicalPrices[j] > typicalPrices[j - 1]) {
positiveFlow += moneyFlows[j];
} else if (typicalPrices[j] < typicalPrices[j - 1]) {
negativeFlow += moneyFlows[j];
}
}
}
if (negativeFlow === 0) {
result.push(100);
} else {
const moneyRatio = positiveFlow / negativeFlow;
const mfiValue = 100 - (100 / (1 + moneyRatio));
const mfiRatio = positiveFlow / negativeFlow;
const mfiValue = 100 - (100 / (1 + mfiRatio));
result.push(mfiValue);
}
}
@ -316,23 +318,24 @@ export function moneyFlowIndex(ohlcv: OHLCVData[], period: number = 14): number[
}
/**
* On Balance Volume (OBV)
* On-Balance Volume (OBV)
*/
export function onBalanceVolume(ohlcv: OHLCVData[]): number[] {
export function obv(ohlcv: OHLCVData[]): number[] {
if (ohlcv.length === 0) return [];
const result: number[] = [ohlcv[0].volume];
for (let i = 1; i < ohlcv.length; i++) {
let obvValue = result[i - 1];
const prev = ohlcv[i - 1];
const curr = ohlcv[i];
if (ohlcv[i].close > ohlcv[i - 1].close) {
obvValue += ohlcv[i].volume;
} else if (ohlcv[i].close < ohlcv[i - 1].close) {
obvValue -= ohlcv[i].volume;
if (curr.close > prev.close) {
result.push(result[result.length - 1] + curr.volume);
} else if (curr.close < prev.close) {
result.push(result[result.length - 1] - curr.volume);
} else {
result.push(result[result.length - 1]);
}
result.push(obvValue);
}
return result;
@ -403,63 +406,54 @@ export function chaikinMoneyFlow(ohlcv: OHLCVData[], period: number = 20): numbe
export function parabolicSAR(
ohlcv: OHLCVData[],
step: number = 0.02,
maximum: number = 0.2
maxStep: number = 0.2
): number[] {
if (ohlcv.length < 2) return [];
const result: number[] = [];
let isUptrend = ohlcv[1].close > ohlcv[0].close;
let sar = isUptrend ? ohlcv[0].low : ohlcv[0].high;
let ep = isUptrend ? ohlcv[1].high : ohlcv[1].low;
let af = step;
let trend = 1; // 1 for uptrend, -1 for downtrend
let acceleration = step;
let extremePoint = ohlcv[0].high;
let sar = ohlcv[0].low;
result.push(sar);
for (let i = 1; i < ohlcv.length; i++) {
const currentHigh = ohlcv[i].high;
const currentLow = ohlcv[i].low;
const currentClose = ohlcv[i].close;
const curr = ohlcv[i];
const prev = ohlcv[i - 1];
// Calculate new SAR
sar = sar + af * (ep - sar);
sar = sar + acceleration * (extremePoint - sar);
if (isUptrend) {
// Uptrend logic
if (currentLow <= sar) {
if (trend === 1) { // Uptrend
if (curr.low <= sar) {
// Trend reversal
isUptrend = false;
sar = ep;
ep = currentLow;
af = step;
trend = -1;
sar = extremePoint;
extremePoint = curr.low;
acceleration = step;
} else {
// Continue uptrend
if (currentHigh > ep) {
ep = currentHigh;
af = Math.min(af + step, maximum);
}
// Ensure SAR doesn't go above previous two lows
if (i >= 2) {
sar = Math.min(sar, ohlcv[i - 1].low, ohlcv[i - 2].low);
if (curr.high > extremePoint) {
extremePoint = curr.high;
acceleration = Math.min(acceleration + step, maxStep);
}
// Ensure SAR doesn't exceed previous lows
sar = Math.min(sar, prev.low, i > 1 ? ohlcv[i - 2].low : prev.low);
}
} else {
// Downtrend logic
if (currentHigh >= sar) {
} else { // Downtrend
if (curr.high >= sar) {
// Trend reversal
isUptrend = true;
sar = ep;
ep = currentHigh;
af = step;
trend = 1;
sar = extremePoint;
extremePoint = curr.high;
acceleration = step;
} else {
// Continue downtrend
if (currentLow < ep) {
ep = currentLow;
af = Math.min(af + step, maximum);
}
// Ensure SAR doesn't go below previous two highs
if (i >= 2) {
sar = Math.max(sar, ohlcv[i - 1].high, ohlcv[i - 2].high);
if (curr.low < extremePoint) {
extremePoint = curr.low;
acceleration = Math.min(acceleration + step, maxStep);
}
// Ensure SAR doesn't exceed previous highs
sar = Math.max(sar, prev.high, i > 1 ? ohlcv[i - 2].high : prev.high);
}
}
@ -468,3 +462,38 @@ export function parabolicSAR(
return result;
}
/**
 * Aroon Indicator
 *
 * For each window of `period` bars, measures how recently the highest high
 * (Aroon Up) and lowest low (Aroon Down) occurred:
 *   Aroon Up   = ((period - daysSinceHigh) / period) * 100
 *   Aroon Down = ((period - daysSinceLow)  / period) * 100
 * so a value of 100 means the extreme occurred on the latest bar of the window,
 * and values fall toward 0 as the extreme ages.
 *
 * @param ohlcv - price bars (high/low are read)
 * @param period - lookback length (default 14); must be >= 1 and < ohlcv.length
 * @returns parallel `up`/`down` arrays, one value per bar from index period - 1 onward
 */
export function aroon(ohlcv: OHLCVData[], period: number = 14): { up: number[], down: number[] } {
  if (period < 1 || period >= ohlcv.length) return { up: [], down: [] };

  const up: number[] = [];
  const down: number[] = [];

  for (let i = period - 1; i < ohlcv.length; i++) {
    const slice = ohlcv.slice(i - period + 1, i + 1);

    // Positions (0 = oldest bar in the window) of the extreme prices.
    let highestIndex = 0;
    let lowestIndex = 0;
    for (let j = 1; j < slice.length; j++) {
      if (slice[j].high > slice[highestIndex].high) {
        highestIndex = j;
      }
      if (slice[j].low < slice[lowestIndex].low) {
        lowestIndex = j;
      }
    }

    // BUG FIX: previous formula ((period - 1 - index) / (period - 1)) * 100 was
    // inverted — a high on the most recent bar scored 0 instead of 100 — and
    // divided by zero when period === 1.
    const daysSinceHigh = period - 1 - highestIndex;
    const daysSinceLow = period - 1 - lowestIndex;
    up.push(((period - daysSinceHigh) / period) * 100);
    down.push(((period - daysSinceLow) / period) * 100);
  }

  return { up, down };
}

View file

@ -352,38 +352,51 @@ export function estimateHestonParameters(
): HestonParameters {
const n = returns.length;
if (n < 10) {
throw new Error('Need at least 10 observations for Heston parameter estimation');
}
// Initial parameter estimates
let kappa = 2.0; // Mean reversion speed
let theta = 0.04; // Long-term variance
let sigma = 0.3; // Vol of vol
let sigma = 0.3; // Volatility of variance
let rho = -0.5; // Correlation
let v0 = 0.04; // Initial variance
// Calculate sample statistics for initialization
const meanReturn = returns.reduce((sum, r) => sum + r, 0) / n;
const sampleVariance = returns.reduce((sum, r) => sum + Math.pow(r - meanReturn, 2), 0) / (n - 1);
theta = sampleVariance;
v0 = sampleVariance;
let logLikelihood = -Infinity;
for (let iter = 0; iter < maxIterations; iter++) {
const variances: number[] = [v0];
let newLogLikelihood = 0;
// Euler discretization of Heston model
const dt = 1 / 252; // Daily time step
let currentVariance = v0;
for (let t = 1; t < n; t++) {
const prevVar = Math.max(variances[t - 1], 1e-8);
const sqrtVar = Math.sqrt(prevVar);
const dt = 1.0; // Assuming daily data
const prevReturn = returns[t - 1];
// Simulate variance process (simplified)
const dW2 = Math.random() - 0.5; // Should be proper random normal
const newVar = prevVar + kappa * (theta - prevVar) * dt + sigma * sqrtVar * Math.sqrt(dt) * dW2;
variances.push(Math.max(newVar, 1e-8));
// Euler discretization of variance process
const dW1 = Math.random() - 0.5; // Simplified random shock
const dW2 = rho * dW1 + Math.sqrt(1 - rho * rho) * (Math.random() - 0.5);
// Calculate likelihood contribution
const expectedReturn = 0; // Assuming zero drift for simplicity
const variance = prevVar;
const actualReturn = returns[t];
const varianceChange = kappa * (theta - currentVariance) * dt +
sigma * Math.sqrt(Math.max(currentVariance, 0)) * dW2;
newLogLikelihood -= 0.5 * (Math.log(2 * Math.PI) + Math.log(variance) +
Math.pow(actualReturn - expectedReturn, 2) / variance);
currentVariance = Math.max(currentVariance + varianceChange, 0.001);
// Log-likelihood contribution (simplified)
const expectedReturn = meanReturn;
const variance = currentVariance;
if (variance > 0) {
newLogLikelihood -= 0.5 * Math.log(2 * Math.PI * variance);
newLogLikelihood -= 0.5 * Math.pow(returns[t] - expectedReturn, 2) / variance;
}
}
// Check for convergence
@ -393,12 +406,13 @@ export function estimateHestonParameters(
logLikelihood = newLogLikelihood;
// Simple parameter update (in practice, use proper optimization)
kappa = Math.max(0.1, Math.min(10, kappa + 0.01));
theta = Math.max(0.001, Math.min(1, theta + 0.001));
sigma = Math.max(0.01, Math.min(2, sigma + 0.01));
rho = Math.max(-0.99, Math.min(0.99, rho + 0.01));
v0 = Math.max(0.001, Math.min(1, v0 + 0.001));
// Simple parameter updates (in practice, use maximum likelihood estimation)
const learningRate = 0.001;
kappa = Math.max(0.1, Math.min(10, kappa + learningRate));
theta = Math.max(0.001, Math.min(1, theta + learningRate));
sigma = Math.max(0.01, Math.min(2, sigma + learningRate));
rho = Math.max(-0.99, Math.min(0.99, rho + learningRate * 0.1));
v0 = Math.max(0.001, Math.min(1, v0 + learningRate));
}
return {
@ -458,3 +472,48 @@ export function calculateVolatilityRisk(
volatilityVolatility
};
}
/**
 * Yang-Zhang volatility estimator.
 *
 * Combines overnight (close-to-open), open-to-close and Rogers-Satchell
 * variance components:
 *   sigma^2_YZ = sigma^2_overnight + k * sigma^2_openToClose + (1 - k) * sigma^2_RS
 * where k = 0.34 / (1.34 + (N + 1) / (N - 1)) and N is the number of return
 * observations (bars - 1). Per Yang & Zhang (2000), the overnight and
 * open-to-close components are DEMEANED sample variances with an (N - 1)
 * denominator (the previous version skipped demeaning and used the bar count),
 * while the drift-independent Rogers-Satchell term is averaged over N.
 *
 * @param ohlcv - price bars with open/high/low/close (prices assumed > 0)
 * @param annualizationFactor - periods per year (default 252 for daily bars)
 * @returns annualized volatility (standard deviation of log returns)
 * @throws Error when fewer than 3 bars are supplied (N - 1 would be 0)
 */
export function calculateYangZhangVolatility(
  ohlcv: OHLCVData[],
  annualizationFactor: number = 252
): number {
  if (ohlcv.length < 3) {
    throw new Error('Need at least 3 observations for Yang-Zhang volatility calculation');
  }

  const N = ohlcv.length - 1; // number of return observations

  const overnightReturns: number[] = [];
  const openToCloseReturns: number[] = [];
  let rogersSatchellSum = 0;

  for (let i = 1; i < ohlcv.length; i++) {
    const prev = ohlcv[i - 1];
    const curr = ohlcv[i];

    // Overnight return (previous close to today's open).
    overnightReturns.push(Math.log(curr.open / prev.close));
    // Intraday open-to-close return.
    openToCloseReturns.push(Math.log(curr.close / curr.open));

    // Rogers-Satchell term: u(u - c) + d(d - c) with u = ln(H/O), d = ln(L/O), c = ln(C/O).
    const u = Math.log(curr.high / curr.open);
    const d = Math.log(curr.low / curr.open);
    const c = Math.log(curr.close / curr.open);
    rogersSatchellSum += u * (u - c) + d * (d - c);
  }

  // Demeaned sample variance with the (N - 1) denominator used by Yang-Zhang.
  const sampleVariance = (xs: number[]): number => {
    const mean = xs.reduce((sum, x) => sum + x, 0) / xs.length;
    return xs.reduce((sum, x) => sum + (x - mean) * (x - mean), 0) / (xs.length - 1);
  };

  // Weight chosen by Yang & Zhang to minimize the estimator's variance.
  const k = 0.34 / (1.34 + (N + 1) / (N - 1));

  const yangZhangVariance =
    sampleVariance(overnightReturns) +
    k * sampleVariance(openToCloseReturns) +
    (1 - k) * (rogersSatchellSum / N);

  return Math.sqrt(yangZhangVariance * annualizationFactor);
}