stock-bot/libs/utils/src/calculations/volatility-models.ts
2025-06-13 13:38:02 -04:00

617 lines
18 KiB
TypeScript

/**
* Volatility Models
* Advanced volatility modeling and forecasting tools
*/
// Local interface definition to avoid circular dependency
/** A single OHLCV price bar. */
interface OHLCVData {
open: number; // Bar opening price
high: number; // Bar high price
low: number; // Bar low price
close: number; // Bar closing price
volume: number; // Traded volume over the bar
timestamp: Date; // Bar timestamp
}
/** Fitted GARCH(1,1) parameters plus model-selection statistics. */
export interface GARCHParameters {
omega: number; // Constant term
alpha: number; // ARCH parameter (reaction to squared shocks)
beta: number; // GARCH parameter (variance persistence)
logLikelihood: number; // Log-likelihood at the returned parameters
aic: number; // Akaike information criterion
bic: number; // Bayesian information criterion
}
/** Annualized volatility from five realized-volatility estimators. */
export interface VolatilityEstimates {
closeToClose: number; // Classic close-to-close estimator
parkinson: number; // High-low range estimator
garmanKlass: number; // OHLC range estimator
rogersSatchell: number; // Drift-independent range estimator
yangZhang: number; // Combined overnight + intraday estimator
}
/** Summary statistics for one detected volatility regime. */
export interface VolatilityRegime {
regime: number; // Regime label (0 = lowest-volatility bucket)
startDate: Date;
endDate: Date;
averageVolatility: number; // Mean annualized volatility within the regime
observations: number; // Number of observations assigned to the regime
}
/** One point on the implied-volatility term structure. */
export interface VolatilityTerm {
maturity: number; // Days to maturity
impliedVolatility: number;
confidence: number; // Heuristic confidence score; decays with maturity
}
/** Heston stochastic-volatility model parameters. */
export interface HestonParameters {
kappa: number; // Mean reversion speed
theta: number; // Long-term variance
sigma: number; // Volatility of variance ("vol of vol")
rho: number; // Correlation between price and variance shocks
v0: number; // Initial variance
logLikelihood: number; // Log-likelihood at the returned parameters
}
/**
 * Calculate realized volatility using several range-based estimators.
 *
 * All results are annualized standard deviations. The Yang-Zhang estimate
 * combines overnight, open-to-close and Rogers-Satchell variance components
 * with the standard drift-adjustment weight k, matching
 * calculateYangZhangVolatility below (previously this term mixed in a running
 * Rogers-Satchell cumulative sum, which is not the Yang-Zhang formula).
 *
 * @param ohlcv - Ordered OHLCV bars (at least 2).
 * @param annualizationFactor - Periods per year (252 for daily data).
 * @returns Volatility estimates from five estimators.
 * @throws Error when fewer than 2 bars are supplied.
 */
export function calculateRealizedVolatility(
  ohlcv: OHLCVData[],
  annualizationFactor: number = 252
): VolatilityEstimates {
  if (ohlcv.length < 2) {
    throw new Error('Need at least 2 observations for volatility calculation');
  }
  const n = ohlcv.length;
  let closeToCloseSum = 0;
  let parkinsonSum = 0;
  let garmanKlassSum = 0;
  let rogersSatchellSum = 0;
  let overnightSum = 0; // Yang-Zhang: close-to-open (overnight) component
  let openToCloseSum = 0; // Yang-Zhang: open-to-close component
  for (let i = 1; i < n; i++) {
    const prev = ohlcv[i - 1];
    const curr = ohlcv[i];
    // Close-to-close log return
    const logReturn = Math.log(curr.close / prev.close);
    closeToCloseSum += logReturn * logReturn;
    // Parkinson: squared log high/low range
    const logHighLow = Math.log(curr.high / curr.low);
    parkinsonSum += logHighLow * logHighLow;
    // Garman-Klass: range term minus open-to-close correction
    const logOpenClose = Math.log(curr.close / curr.open);
    garmanKlassSum +=
      0.5 * logHighLow * logHighLow - (2 * Math.log(2) - 1) * logOpenClose * logOpenClose;
    // Rogers-Satchell: drift-independent range terms
    const logHighOpen = Math.log(curr.high / curr.open);
    const logHighClose = Math.log(curr.high / curr.close);
    const logLowOpen = Math.log(curr.low / curr.open);
    const logLowClose = Math.log(curr.low / curr.close);
    rogersSatchellSum += logHighOpen * logHighClose + logLowOpen * logLowClose;
    // Yang-Zhang components
    const overnight = Math.log(curr.open / prev.close);
    overnightSum += overnight * overnight;
    openToCloseSum += logOpenClose * logOpenClose;
  }
  // Yang-Zhang drift-adjustment weight
  const k = 0.34 / (1.34 + (n + 1) / (n - 1));
  const yangZhangVariance =
    overnightSum / (n - 1) +
    (k * openToCloseSum) / (n - 1) +
    ((1 - k) * rogersSatchellSum) / (n - 1);
  return {
    closeToClose: Math.sqrt((closeToCloseSum / (n - 1)) * annualizationFactor),
    parkinson: Math.sqrt((parkinsonSum / (n - 1) / (4 * Math.log(2))) * annualizationFactor),
    garmanKlass: Math.sqrt((garmanKlassSum / (n - 1)) * annualizationFactor),
    rogersSatchell: Math.sqrt((rogersSatchellSum / (n - 1)) * annualizationFactor),
    yangZhang: Math.sqrt(yangZhangVariance * annualizationFactor),
  };
}
/**
 * Estimate GARCH(1,1) parameters via a naive iterative search.
 *
 * Fix: the log-likelihood now uses the floored conditional variance, so a
 * non-positive raw variance can no longer produce NaN via Math.log (the floor
 * was previously applied to the stored series but not to the likelihood term).
 *
 * NOTE(review): the parameter update is a fixed-step walk, not a true
 * gradient; kept as-is to preserve the existing interface and behavior shape.
 *
 * @param returns - Return series; needs at least 2 observations.
 * @param maxIterations - Cap on search iterations.
 * @param tolerance - Convergence threshold on the log-likelihood change.
 * @returns Fitted omega/alpha/beta plus log-likelihood and AIC/BIC.
 * @throws Error when fewer than 2 returns are supplied.
 */
export function estimateGARCH(
  returns: number[],
  maxIterations: number = 100,
  tolerance: number = 1e-6
): GARCHParameters {
  const n = returns.length;
  if (n < 2) {
    throw new Error('Need at least 2 observations for GARCH estimation');
  }
  // Initial parameter estimates
  let omega = 0.01;
  let alpha = 0.05;
  let beta = 0.9;
  // Unconditional (sample) variance seeds the recursion
  const meanReturn = returns.reduce((sum, r) => sum + r, 0) / n;
  const unconditionalVar =
    returns.reduce((sum, r) => sum + Math.pow(r - meanReturn, 2), 0) / (n - 1);
  let logLikelihood = -Infinity;
  for (let iter = 0; iter < maxIterations; iter++) {
    const variances: number[] = [unconditionalVar];
    let newLogLikelihood = 0;
    // GARCH(1,1) conditional-variance recursion
    for (let t = 1; t < n; t++) {
      const prevVar = variances[t - 1];
      const prevReturn = returns[t - 1] - meanReturn;
      // Floor the variance so the log below is always defined
      const currentVar = Math.max(omega + alpha * prevReturn * prevReturn + beta * prevVar, 1e-8);
      variances.push(currentVar);
      const currentReturn = returns[t] - meanReturn;
      // Gaussian log-likelihood contribution (uses the floored variance)
      newLogLikelihood -=
        0.5 *
        (Math.log(2 * Math.PI) +
          Math.log(currentVar) +
          (currentReturn * currentReturn) / currentVar);
    }
    // Stop once the likelihood no longer improves meaningfully
    if (Math.abs(newLogLikelihood - logLikelihood) < tolerance) {
      break;
    }
    logLikelihood = newLogLikelihood;
    // Fixed-step parameter walk (placeholder for a real optimizer)
    const gradientStep = 0.001;
    omega = Math.max(0.001, omega + gradientStep);
    alpha = Math.max(0.001, Math.min(0.999, alpha + gradientStep));
    beta = Math.max(0.001, Math.min(0.999 - alpha, beta + gradientStep));
  }
  // Information criteria for the 3 free parameters
  const k = 3;
  const aic = -2 * logLikelihood + 2 * k;
  const bic = -2 * logLikelihood + k * Math.log(n);
  return { omega, alpha, beta, logLikelihood, aic, bic };
}
/**
 * Compute an exponentially weighted moving-average (EWMA) volatility series.
 *
 * The variance is seeded with the full-sample variance, then updated with
 * sigma²_t = lambda * sigma²_{t-1} + (1 - lambda) * r²_{t-1}; each entry is
 * returned as an annualized standard deviation.
 *
 * @param returns - Return series.
 * @param lambda - Decay factor (RiskMetrics default 0.94).
 * @param annualizationFactor - Periods per year (252 for daily data).
 * @returns Annualized EWMA volatility, one value per input return.
 */
export function calculateEWMAVolatility(
  returns: number[],
  lambda: number = 0.94,
  annualizationFactor: number = 252
): number[] {
  const count = returns.length;
  // Seed with the sample variance of the whole series
  const mean = returns.reduce((acc, value) => acc + value, 0) / count;
  let ewmaVariance =
    returns.reduce((acc, value) => acc + Math.pow(value - mean, 2), 0) / (count - 1);
  return returns.map((_, idx) => {
    if (idx > 0) {
      const shock = returns[idx - 1] - mean;
      ewmaVariance = lambda * ewmaVariance + (1 - lambda) * shock * shock;
    }
    return Math.sqrt(ewmaVariance * annualizationFactor);
  });
}
/**
 * Identify volatility regimes by quantile-thresholding absolute returns.
 *
 * Fixes: the dead `timestamps` array (built but never read) was removed, and
 * the regime average volatility now indexes the rolling-volatility series with
 * the correct window offset — rollingVol[0] corresponds to return index
 * windowSize - 1, not index 0. Observations before the first completed window
 * contribute 0, matching the previous out-of-range fallback.
 *
 * @param returns - Return series.
 * @param numRegimes - Number of volatility buckets (default 3).
 * @param windowSize - Rolling window length for volatility (default 60).
 * @returns One VolatilityRegime per non-empty bucket.
 */
export function identifyVolatilityRegimes(
  returns: number[],
  numRegimes: number = 3,
  windowSize: number = 60
): VolatilityRegime[] {
  // Rolling annualized volatility; entry j covers return index j + windowSize - 1
  const rollingVol: number[] = [];
  for (let i = windowSize - 1; i < returns.length; i++) {
    const window = returns.slice(i - windowSize + 1, i + 1);
    const mean = window.reduce((sum, r) => sum + r, 0) / window.length;
    const variance =
      window.reduce((sum, r) => sum + Math.pow(r - mean, 2), 0) / (window.length - 1);
    rollingVol.push(Math.sqrt(variance * 252)); // Annualized
  }
  // Quantile thresholds on absolute returns define the regime buckets
  const absReturns = returns.map(ret => Math.abs(ret));
  const sortedReturns = [...absReturns].sort((a, b) => a - b);
  const thresholds: number[] = [];
  for (let i = 1; i < numRegimes; i++) {
    const index = Math.floor((i / numRegimes) * sortedReturns.length);
    thresholds.push(sortedReturns[index]);
  }
  // Classify each return into a regime bucket
  const regimeSequence = absReturns.map(absRet => {
    for (let i = 0; i < thresholds.length; i++) {
      if (absRet <= thresholds[i]) {
        return i;
      }
    }
    return numRegimes - 1;
  });
  const offset = windowSize - 1; // maps a return index to its rollingVol index
  const regimes: VolatilityRegime[] = [];
  for (let regime = 0; regime < numRegimes; regime++) {
    const regimeIndices = regimeSequence
      .map((r, idx) => (r === regime ? idx : -1))
      .filter(idx => idx !== -1);
    if (regimeIndices.length > 0) {
      // Align return indices with the rolling-volatility series; indices
      // without a completed window contribute 0
      const regimeVolatilities = regimeIndices.map(idx => {
        const volIdx = idx - offset;
        return volIdx >= 0 && volIdx < rollingVol.length ? rollingVol[volIdx] : 0;
      });
      const avgVol =
        regimeVolatilities.reduce((sum, vol) => sum + vol, 0) / regimeVolatilities.length;
      regimes.push({
        regime,
        // NOTE(review): mock dates — real bar timestamps are not available here
        startDate: new Date(Date.now()),
        endDate: new Date(Date.now() + regimeIndices.length * 24 * 60 * 60 * 1000),
        averageVolatility: avgVol,
        observations: regimeIndices.length,
      });
    }
  }
  return regimes;
}
/**
 * Build an implied-volatility term structure from spot volatility using a
 * simple exponential mean-reversion decay.
 *
 * @param spotVol - Current (spot) volatility level.
 * @param maturities - Maturities in days.
 * @param meanReversion - Decay rate per year (default 0.5).
 * @returns One VolatilityTerm per requested maturity.
 */
export function calculateVolatilityTermStructure(
  spotVol: number,
  maturities: number[],
  meanReversion: number = 0.5
): VolatilityTerm[] {
  const terms: VolatilityTerm[] = [];
  for (const maturity of maturities) {
    const years = maturity / 365; // Convert days to years
    const decayed = spotVol * Math.exp(-meanReversion * years);
    terms.push({
      maturity,
      impliedVolatility: Math.max(decayed, 0.01), // Floor at 1%
      confidence: Math.exp(-years), // Confidence decreases with maturity
    });
  }
  return terms;
}
/**
 * Extract smile/skew summary statistics from an implied-volatility curve.
 *
 * @param strikes - Option strikes (same length/order as impliedVols).
 * @param spotPrice - Current underlying price.
 * @param impliedVols - Implied volatility per strike.
 * @returns ATM vol, local skew and convexity at the ATM strike, and the
 *   10%-OTM call-minus-put risk reversal (0 when no such strikes exist).
 * @throws Error when inputs are mismatched or fewer than 3 strikes are given.
 */
export function calculateVolatilitySmile(
  strikes: number[],
  spotPrice: number,
  impliedVols: number[]
): {
  atmVolatility: number;
  skew: number;
  convexity: number;
  riskReversal: number;
} {
  if (strikes.length !== impliedVols.length || strikes.length < 3) {
    throw new Error('Need at least 3 strikes with corresponding implied volatilities');
  }
  // Locate the strike nearest to spot (first one wins on ties)
  let atmIndex = 0;
  for (let i = 1; i < strikes.length; i++) {
    if (Math.abs(strikes[i] - spotPrice) < Math.abs(strikes[atmIndex] - spotPrice)) {
      atmIndex = i;
    }
  }
  const atmVolatility = impliedVols[atmIndex];
  const hasNeighbors = atmIndex > 0 && atmIndex < strikes.length - 1;
  // Skew: central first difference around the ATM strike
  let skew = 0;
  if (hasNeighbors) {
    const deltaStrike = strikes[atmIndex + 1] - strikes[atmIndex - 1];
    const deltaVol = impliedVols[atmIndex + 1] - impliedVols[atmIndex - 1];
    skew = deltaVol / deltaStrike;
  }
  // Convexity: central second difference using the upper strike spacing
  let convexity = 0;
  if (hasNeighbors) {
    const step = strikes[atmIndex + 1] - strikes[atmIndex];
    convexity =
      (impliedVols[atmIndex + 1] - 2 * impliedVols[atmIndex] + impliedVols[atmIndex - 1]) /
      (step * step);
  }
  // Risk reversal: OTM call vol minus OTM put vol (10% moneyness bands)
  const putIdx = strikes.findIndex(strike => strike < spotPrice * 0.9);
  const callIdx = strikes.findIndex(strike => strike > spotPrice * 1.1);
  const riskReversal =
    putIdx !== -1 && callIdx !== -1 ? impliedVols[callIdx] - impliedVols[putIdx] : 0;
  return { atmVolatility, skew, convexity, riskReversal };
}
/**
 * Estimate Heston stochastic-volatility parameters via a simplified
 * simulation-based likelihood search.
 *
 * Fix: the variance path previously drew shocks from Math.random(), making
 * the returned logLikelihood (and the convergence path) nondeterministic
 * between calls on identical input. Shocks now come from a seeded 32-bit LCG
 * so results are reproducible.
 *
 * NOTE(review): the parameter updates are fixed-step nudges, not true maximum
 * likelihood — kept as-is to preserve the existing behavior shape.
 *
 * @param returns - Return series; needs at least 10 observations.
 * @param maxIterations - Cap on search iterations.
 * @returns Fitted kappa/theta/sigma/rho/v0 plus log-likelihood.
 * @throws Error when fewer than 10 returns are supplied.
 */
export function estimateHestonParameters(
  returns: number[],
  maxIterations: number = 100
): HestonParameters {
  const n = returns.length;
  if (n < 10) {
    throw new Error('Need at least 10 observations for Heston parameter estimation');
  }
  // Deterministic uniform(0,1) generator (LCG) for reproducible shocks
  let seed = 123456789;
  const nextUniform = (): number => {
    seed = (Math.imul(seed, 1103515245) + 12345) & 0x7fffffff;
    return seed / 0x80000000;
  };
  // Initial parameter guesses
  let kappa = 2.0; // Mean reversion speed
  let theta = 0.04; // Long-term variance
  let sigma = 0.3; // Volatility of variance
  let rho = -0.5; // Correlation between price and variance shocks
  let v0 = 0.04; // Initial variance
  // Seed theta/v0 from the sample variance
  const meanReturn = returns.reduce((sum, r) => sum + r, 0) / n;
  const sampleVariance = returns.reduce((sum, r) => sum + Math.pow(r - meanReturn, 2), 0) / (n - 1);
  theta = sampleVariance;
  v0 = sampleVariance;
  let logLikelihood = -Infinity;
  for (let iter = 0; iter < maxIterations; iter++) {
    let newLogLikelihood = 0;
    let currentVariance = v0;
    for (let t = 1; t < n; t++) {
      const dt = 1.0; // Assuming daily data
      // Correlated shocks via Cholesky-style mixing (simplified, +/-0.5 range)
      const dW1 = nextUniform() - 0.5;
      const dW2 = rho * dW1 + Math.sqrt(1 - rho * rho) * (nextUniform() - 0.5);
      // Euler discretization of the variance process
      const varianceChange =
        kappa * (theta - currentVariance) * dt +
        sigma * Math.sqrt(Math.max(currentVariance, 0)) * dW2;
      currentVariance = Math.max(currentVariance + varianceChange, 0.001);
      // Gaussian log-likelihood contribution (simplified); variance is
      // floored at 0.001 above, so the log is always defined
      newLogLikelihood -= 0.5 * Math.log(2 * Math.PI * currentVariance);
      newLogLikelihood -= (0.5 * Math.pow(returns[t] - meanReturn, 2)) / currentVariance;
    }
    // Stop once the likelihood no longer improves meaningfully
    if (Math.abs(newLogLikelihood - logLikelihood) < 1e-6) {
      break;
    }
    logLikelihood = newLogLikelihood;
    // Fixed-step parameter nudges within admissible ranges
    const learningRate = 0.001;
    kappa = Math.max(0.1, Math.min(10, kappa + learningRate));
    theta = Math.max(0.001, Math.min(1, theta + learningRate));
    sigma = Math.max(0.01, Math.min(2, sigma + learningRate));
    rho = Math.max(-0.99, Math.min(0.99, rho + learningRate * 0.1));
    v0 = Math.max(0.001, Math.min(1, v0 + learningRate));
  }
  return { kappa, theta, sigma, rho, v0, logLikelihood };
}
/**
 * Calculate volatility-of-volatility risk metrics from a return series.
 *
 * Uses 30-observation rolling windows of annualized volatility.
 *
 * Fix: previously, fewer than 30 returns produced an empty volatility series,
 * yielding undefined VaR, -Infinity max (Math.max of nothing) and NaN
 * vol-of-vol; this now throws explicitly. A single rolling window now yields
 * volatilityVolatility = 0 instead of NaN (division by zero).
 *
 * @param returns - Return series (at least 30 observations).
 * @param confidenceLevel - Tail probability for the volatility VaR (0.05 = worst 5%).
 * @returns VaR, expected shortfall, maximum and standard deviation of the
 *   rolling annualized volatilities.
 * @throws Error when fewer than 30 observations are supplied.
 */
export function calculateVolatilityRisk(
  returns: number[],
  confidenceLevel: number = 0.05
): {
  volatilityVaR: number;
  expectedShortfall: number;
  maxVolatility: number;
  volatilityVolatility: number;
} {
  const windowSize = 30;
  if (returns.length < windowSize) {
    throw new Error(`Need at least ${windowSize} observations for volatility risk metrics`);
  }
  // Rolling annualized volatilities
  const volatilities: number[] = [];
  for (let i = windowSize - 1; i < returns.length; i++) {
    const window = returns.slice(i - windowSize + 1, i + 1);
    const mean = window.reduce((sum, r) => sum + r, 0) / window.length;
    const variance =
      window.reduce((sum, r) => sum + Math.pow(r - mean, 2), 0) / (window.length - 1);
    volatilities.push(Math.sqrt(variance * 252)); // Annualized
  }
  // Worst (highest) volatilities first for the tail metrics
  const sortedVols = [...volatilities].sort((a, b) => b - a);
  const varIndex = Math.floor(confidenceLevel * sortedVols.length);
  const volatilityVaR = sortedVols[varIndex];
  // Expected shortfall: mean of the tail up to and including the VaR point
  const tail = sortedVols.slice(0, varIndex + 1);
  const expectedShortfall = tail.reduce((sum, vol) => sum + vol, 0) / tail.length;
  const maxVolatility = Math.max(...volatilities);
  // Standard deviation of the rolling volatilities (0 when only one window)
  const meanVol = volatilities.reduce((sum, vol) => sum + vol, 0) / volatilities.length;
  const volatilityVolatility =
    volatilities.length > 1
      ? Math.sqrt(
          volatilities.reduce((sum, vol) => sum + Math.pow(vol - meanVol, 2), 0) /
            (volatilities.length - 1)
        )
      : 0;
  return { volatilityVaR, expectedShortfall, maxVolatility, volatilityVolatility };
}
/**
 * Yang-Zhang volatility estimator (annualized).
 *
 * Combines overnight (close-to-open), open-to-close and Rogers-Satchell
 * variance components with the standard drift-adjustment weight k.
 *
 * @param ohlcv - Ordered OHLCV bars (at least 2).
 * @param annualizationFactor - Periods per year (252 for daily data).
 * @returns Annualized Yang-Zhang volatility.
 * @throws Error when fewer than 2 bars are supplied.
 */
export function calculateYangZhangVolatility(
  ohlcv: OHLCVData[],
  annualizationFactor: number = 252
): number {
  if (ohlcv.length < 2) {
    throw new Error('Need at least 2 observations for Yang-Zhang volatility calculation');
  }
  const n = ohlcv.length;
  let overnightSum = 0;
  let openToCloseSum = 0;
  let rogersSatchellSum = 0;
  for (let i = 1; i < n; i++) {
    const previousBar = ohlcv[i - 1];
    const bar = ohlcv[i];
    // Overnight (close-to-open) squared log return
    const overnightReturn = Math.log(bar.open / previousBar.close);
    overnightSum += overnightReturn * overnightReturn;
    // Intraday (open-to-close) squared log return
    const intradayReturn = Math.log(bar.close / bar.open);
    openToCloseSum += intradayReturn * intradayReturn;
    // Rogers-Satchell drift-independent range term
    const highOverOpen = Math.log(bar.high / bar.open);
    const highOverClose = Math.log(bar.high / bar.close);
    const lowOverOpen = Math.log(bar.low / bar.open);
    const lowOverClose = Math.log(bar.low / bar.close);
    rogersSatchellSum += highOverOpen * highOverClose + lowOverOpen * lowOverClose;
  }
  // Drift-adjustment weight between open-to-close and Rogers-Satchell terms
  const k = 0.34 / (1.34 + (n + 1) / (n - 1));
  const yangZhangVariance =
    overnightSum / (n - 1) +
    (k * openToCloseSum) / (n - 1) +
    ((1 - k) * rogersSatchellSum) / (n - 1);
  return Math.sqrt(yangZhangVariance * annualizationFactor);
}
/**
 * Parkinson high-low range volatility estimator (annualized).
 *
 * Fix: the 1/(4 ln 2) scaling required by Parkinson's formula was missing
 * here, making this function inconsistent with the Parkinson term used in
 * calculateRealizedVolatility above.
 *
 * @param ohlcv - Ordered OHLCV bars; returns 0 for fewer than 2 bars.
 * @param annualizationFactor - Periods per year (252 for daily data).
 * @returns Annualized Parkinson volatility (0 when not enough data).
 */
export function parkinsonVolatility(ohlcv: OHLCVData[], annualizationFactor: number = 252): number {
  if (ohlcv.length < 2) {
    return 0;
  }
  const sumSquaredRanges = ohlcv.slice(1).reduce((acc, curr) => {
    const range = Math.log(curr.high / curr.low);
    return acc + range * range;
  }, 0);
  // Parkinson scaling: sigma^2 = sum(ln(H/L)^2) / (4 ln 2 * N)
  return Math.sqrt(
    (sumSquaredRanges / (ohlcv.length - 1) / (4 * Math.log(2))) * annualizationFactor
  );
}
/**
 * Black-Scholes price of a European option.
 *
 * @param spotPrice - Current underlying price.
 * @param strikePrice - Option strike.
 * @param timeToExpiry - Time to expiry in years.
 * @param volatility - Annualized volatility (e.g. 0.2 for 20%).
 * @param riskFreeRate - Continuously compounded risk-free rate.
 * @param optionType - 'call' or 'put'.
 * @returns Theoretical option price.
 */
function blackScholes(
  spotPrice: number,
  strikePrice: number,
  timeToExpiry: number,
  volatility: number,
  riskFreeRate: number,
  optionType: 'call' | 'put'
): number {
  const volSqrtT = volatility * Math.sqrt(timeToExpiry);
  const d1 =
    (Math.log(spotPrice / strikePrice) +
      (riskFreeRate + 0.5 * volatility * volatility) * timeToExpiry) /
    volSqrtT;
  const d2 = d1 - volSqrtT;
  const discountedStrike = strikePrice * Math.exp(-riskFreeRate * timeToExpiry);
  return optionType === 'call'
    ? spotPrice * normalCDF(d1) - discountedStrike * normalCDF(d2)
    : discountedStrike * normalCDF(-d2) - spotPrice * normalCDF(-d1);
}
/**
 * Standard normal cumulative distribution function.
 *
 * Uses the Abramowitz & Stegun 7.1.26 rational approximation of erf
 * (|error| < 1.5e-7), evaluated at x / sqrt(2) so that
 * Phi(x) = 0.5 * (1 + erf(x / sqrt(2))).
 *
 * Fix: the erf coefficients were previously applied to x directly (with
 * exp(-x^2/2)), which mismatches the approximation and gave e.g.
 * Phi(1) ~ 0.8705 instead of the correct 0.8413.
 *
 * @param x - Standard normal quantile.
 * @returns P(Z <= x) for Z ~ N(0, 1).
 */
function normalCDF(x: number): number {
  const a1 = 0.254829592;
  const a2 = -0.284496736;
  const a3 = 1.421060743;
  const a4 = -1.453152027;
  const a5 = 1.061405429;
  const p = 0.3275911;
  const sign = x < 0 ? -1 : 1;
  // erf argument: |x| / sqrt(2)
  const z = Math.abs(x) / Math.SQRT2;
  const t = 1 / (1 + p * z);
  const erf =
    1 -
    (a1 * t + a2 * t * t + a3 * t * t * t + a4 * t * t * t * t + a5 * t * t * t * t * t) *
      Math.exp(-z * z);
  return 0.5 * (1 + sign * erf);
}
/**
 * Forecast volatility under an EWMA model.
 *
 * Under EWMA (no mean reversion) the h-step-ahead variance forecast is flat:
 * the forecast for any horizon equals the latest observed volatility. The
 * previous loop computed `lambda * f + (1 - lambda) * f` per step, which is
 * algebraically the identity and only accumulated floating-point rounding
 * drift; it has been removed.
 *
 * @param volatilities - Historical volatility series (last entry is current).
 * @param lambda - Decay factor; retained for interface compatibility — it
 *   does not affect a flat EWMA forecast.
 * @param forecastHorizon - Steps ahead; retained for interface compatibility.
 * @returns The forecast volatility (0 for an empty series).
 */
export function forecastVolatilityEWMA(
  volatilities: number[],
  lambda: number = 0.94,
  forecastHorizon: number = 1
): number {
  if (volatilities.length === 0) {
    return 0;
  }
  return volatilities[volatilities.length - 1];
}